mirror of
https://github.com/zadam/trilium.git
synced 2026-03-23 04:10:16 +01:00
Compare commits
24 Commits
feat/add-m
...
feat/searc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bd25ae77fc | ||
|
|
9aec8be1c0 | ||
|
|
90ac727250 | ||
|
|
5bc9840825 | ||
|
|
48dd93b94b | ||
|
|
ac231374f6 | ||
|
|
87fc4e1281 | ||
|
|
8fd2cb39c1 | ||
|
|
24a01aefe2 | ||
|
|
06fb9c0a6b | ||
|
|
bc0942180e | ||
|
|
f358563c27 | ||
|
|
dcaebeea83 | ||
|
|
ac13af73c5 | ||
|
|
ba529d2721 | ||
|
|
f23a7b4842 | ||
|
|
5718631889 | ||
|
|
da3d71d21e | ||
|
|
b533546236 | ||
|
|
1c148f407c | ||
|
|
9403efa9a1 | ||
|
|
6a06fc7995 | ||
|
|
77733ce205 | ||
|
|
585b6ccd3e |
2
.github/actions/build-server/action.yml
vendored
2
.github/actions/build-server/action.yml
vendored
@@ -8,7 +8,7 @@ inputs:
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
|
||||
2
.github/actions/report-size/action.yml
vendored
2
.github/actions/report-size/action.yml
vendored
@@ -69,7 +69,7 @@ runs:
|
||||
|
||||
# Post github action comment
|
||||
- name: Post comment
|
||||
uses: marocchino/sticky-pull-request-comment@v3
|
||||
uses: marocchino/sticky-pull-request-comment@v2
|
||||
if: ${{ steps.bundleSize.outputs.hasDifferences == 'true' }} # post only in case of changes
|
||||
with:
|
||||
number: ${{ github.event.pull_request.number }}
|
||||
|
||||
2
.github/workflows/deploy-docs.yml
vendored
2
.github/workflows/deploy-docs.yml
vendored
@@ -45,7 +45,7 @@ jobs:
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v5
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
|
||||
10
.github/workflows/dev.yml
vendored
10
.github/workflows/dev.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -74,7 +74,7 @@ jobs:
|
||||
- test_dev
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: Update build info
|
||||
@@ -89,7 +89,7 @@ jobs:
|
||||
key: ${{ secrets.RELATIVE_CI_CLIENT_KEY }}
|
||||
- name: Trigger server build
|
||||
run: pnpm run server:build
|
||||
- uses: docker/setup-buildx-action@v4
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
- uses: docker/build-push-action@v7
|
||||
with:
|
||||
context: apps/server
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
@@ -124,7 +124,7 @@ jobs:
|
||||
run: echo "TEST_TAG=${TEST_TAG,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and export to Docker
|
||||
uses: docker/build-push-action@v7
|
||||
|
||||
2
.github/workflows/i18n.yml
vendored
2
.github/workflows/i18n.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
|
||||
8
.github/workflows/main-docker.yml
vendored
8
.github/workflows/main-docker.yml
vendored
@@ -40,9 +40,9 @@ jobs:
|
||||
run: echo "TEST_TAG=${TEST_TAG,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -142,7 +142,7 @@ jobs:
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -178,7 +178,7 @@ jobs:
|
||||
uses: docker/setup-qemu-action@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v4
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@v4
|
||||
|
||||
6
.github/workflows/nightly.yml
vendored
6
.github/workflows/nightly.yml
vendored
@@ -61,7 +61,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os.image }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -91,7 +91,7 @@ jobs:
|
||||
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGN_KEY }}
|
||||
|
||||
- name: Publish release
|
||||
uses: softprops/action-gh-release@v2.6.1
|
||||
uses: softprops/action-gh-release@v2.5.0
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
make_latest: false
|
||||
@@ -132,7 +132,7 @@ jobs:
|
||||
arch: ${{ matrix.arch }}
|
||||
|
||||
- name: Publish release
|
||||
uses: softprops/action-gh-release@v2.6.1
|
||||
uses: softprops/action-gh-release@v2.5.0
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
make_latest: false
|
||||
|
||||
2
.github/workflows/playwright.yml
vendored
2
.github/workflows/playwright.yml
vendored
@@ -38,7 +38,7 @@ jobs:
|
||||
filter: tree:0
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 24
|
||||
|
||||
6
.github/workflows/release.yml
vendored
6
.github/workflows/release.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -66,7 +66,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os.image }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -150,7 +150,7 @@ jobs:
|
||||
path: upload
|
||||
|
||||
- name: Publish stable release
|
||||
uses: softprops/action-gh-release@v2.6.1
|
||||
uses: softprops/action-gh-release@v2.5.0
|
||||
with:
|
||||
draft: false
|
||||
body_path: docs/Release Notes/Release Notes/${{ github.ref_name }}.md
|
||||
|
||||
4
.github/workflows/web-clipper.yml
vendored
4
.github/workflows/web-clipper.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
compression-level: 0
|
||||
|
||||
- name: Release web clipper extension
|
||||
uses: softprops/action-gh-release@v2.6.1
|
||||
uses: softprops/action-gh-release@v2.5.0
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/web-clipper-v') }}
|
||||
with:
|
||||
draft: false
|
||||
|
||||
2
.github/workflows/website.yml
vendored
2
.github/workflows/website.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: pnpm/action-setup@v5
|
||||
- uses: pnpm/action-setup@v4
|
||||
- name: Set up node & dependencies
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -46,6 +46,7 @@ upload
|
||||
|
||||
/.direnv
|
||||
/result
|
||||
.svelte-kit
|
||||
|
||||
# docs
|
||||
site/
|
||||
|
||||
@@ -14,9 +14,9 @@
|
||||
"keywords": [],
|
||||
"author": "Elian Doran <contact@eliandoran.me>",
|
||||
"license": "AGPL-3.0-only",
|
||||
"packageManager": "pnpm@10.32.1",
|
||||
"packageManager": "pnpm@10.32.0",
|
||||
"devDependencies": {
|
||||
"@redocly/cli": "2.24.1",
|
||||
"@redocly/cli": "2.20.2",
|
||||
"archiver": "7.0.1",
|
||||
"fs-extra": "11.3.4",
|
||||
"js-yaml": "4.1.1",
|
||||
|
||||
@@ -35,15 +35,15 @@
|
||||
"@triliumnext/highlightjs": "workspace:*",
|
||||
"@triliumnext/share-theme": "workspace:*",
|
||||
"@triliumnext/split.js": "workspace:*",
|
||||
"@univerjs/preset-sheets-conditional-formatting": "0.18.0",
|
||||
"@univerjs/preset-sheets-core": "0.18.0",
|
||||
"@univerjs/preset-sheets-data-validation": "0.18.0",
|
||||
"@univerjs/preset-sheets-filter": "0.18.0",
|
||||
"@univerjs/preset-sheets-find-replace": "0.18.0",
|
||||
"@univerjs/preset-sheets-note": "0.18.0",
|
||||
"@univerjs/preset-sheets-sort": "0.18.0",
|
||||
"@univerjs/presets": "0.18.0",
|
||||
"@zumer/snapdom": "2.5.0",
|
||||
"@univerjs/preset-sheets-conditional-formatting": "0.16.1",
|
||||
"@univerjs/preset-sheets-core": "0.16.1",
|
||||
"@univerjs/preset-sheets-data-validation": "0.16.1",
|
||||
"@univerjs/preset-sheets-filter": "0.16.1",
|
||||
"@univerjs/preset-sheets-find-replace": "0.16.1",
|
||||
"@univerjs/preset-sheets-note": "0.16.1",
|
||||
"@univerjs/preset-sheets-sort": "0.16.1",
|
||||
"@univerjs/presets": "0.16.1",
|
||||
"@zumer/snapdom": "2.1.0",
|
||||
"autocomplete.js": "0.38.1",
|
||||
"bootstrap": "5.3.8",
|
||||
"boxicons": "2.1.4",
|
||||
@@ -51,26 +51,27 @@
|
||||
"color": "5.0.3",
|
||||
"debounce": "3.0.0",
|
||||
"draggabilly": "3.0.0",
|
||||
"force-graph": "1.51.2",
|
||||
"force-graph": "1.51.1",
|
||||
"globals": "17.4.0",
|
||||
"i18next": "25.10.3",
|
||||
"i18next": "25.8.17",
|
||||
"i18next-http-backend": "3.0.2",
|
||||
"jquery": "4.0.0",
|
||||
"jquery.fancytree": "2.38.5",
|
||||
"jsplumb": "2.15.6",
|
||||
"katex": "0.16.40",
|
||||
"katex": "0.16.38",
|
||||
"knockout": "3.5.1",
|
||||
"leaflet": "1.9.4",
|
||||
"leaflet-gpx": "2.2.0",
|
||||
"mark.js": "8.11.1",
|
||||
"marked": "17.0.5",
|
||||
"mermaid": "11.13.0",
|
||||
"marked": "17.0.4",
|
||||
"mermaid": "11.12.3",
|
||||
"mind-elixir": "5.9.3",
|
||||
"normalize.css": "8.0.1",
|
||||
"panzoom": "9.4.3",
|
||||
"preact": "10.29.0",
|
||||
"react-i18next": "16.6.0",
|
||||
"react-i18next": "16.5.6",
|
||||
"react-window": "2.2.7",
|
||||
"reveal.js": "6.0.0",
|
||||
"reveal.js": "5.2.1",
|
||||
"rrule": "2.8.1",
|
||||
"svg-pan-zoom": "3.6.2",
|
||||
"tabulator-tables": "6.4.0",
|
||||
@@ -84,11 +85,12 @@
|
||||
"@types/leaflet": "1.9.21",
|
||||
"@types/leaflet-gpx": "1.3.8",
|
||||
"@types/mark.js": "8.11.12",
|
||||
"@types/reveal.js": "5.2.2",
|
||||
"@types/tabulator-tables": "6.3.1",
|
||||
"copy-webpack-plugin": "14.0.0",
|
||||
"happy-dom": "20.8.4",
|
||||
"happy-dom": "20.8.3",
|
||||
"lightningcss": "1.32.0",
|
||||
"script-loader": "0.7.2",
|
||||
"vite-plugin-static-copy": "3.3.0"
|
||||
"vite-plugin-static-copy": "3.2.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -67,6 +67,14 @@ async function createNote(parentNotePath: string | undefined, options: CreateNot
|
||||
|
||||
const parentNoteId = treeService.getNoteIdFromUrl(parentNotePath);
|
||||
|
||||
if (options.type === "mermaid" && !options.content && !options.templateNoteId) {
|
||||
options.content = `graph TD;
|
||||
A-->B;
|
||||
A-->C;
|
||||
B-->D;
|
||||
C-->D;`;
|
||||
}
|
||||
|
||||
const { note, branch } = await server.post<Response>(`notes/${parentNoteId}/children?target=${options.target}&targetBranchId=${options.targetBranchId || ""}`, {
|
||||
title: options.title,
|
||||
content: options.content || "",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export type LabelType = "text" | "textarea" | "number" | "boolean" | "date" | "datetime" | "time" | "url" | "color";
|
||||
export type LabelType = "text" | "number" | "boolean" | "date" | "datetime" | "time" | "url" | "color";
|
||||
type Multiplicity = "single" | "multi";
|
||||
|
||||
export interface DefinitionObject {
|
||||
@@ -17,7 +17,7 @@ function parse(value: string) {
|
||||
for (const token of tokens) {
|
||||
if (token === "promoted") {
|
||||
defObj.isPromoted = true;
|
||||
} else if (["text", "textarea", "number", "boolean", "date", "datetime", "time", "url", "color"].includes(token)) {
|
||||
} else if (["text", "number", "boolean", "date", "datetime", "time", "url", "color"].includes(token)) {
|
||||
defObj.labelType = token as LabelType;
|
||||
} else if (["single", "multi"].includes(token)) {
|
||||
defObj.multiplicity = token as Multiplicity;
|
||||
|
||||
@@ -93,7 +93,7 @@ async function upload(url: string, fileToUpload: File, componentId?: string, met
|
||||
const formData = new FormData();
|
||||
formData.append("upload", fileToUpload);
|
||||
|
||||
const doUpload = async () => $.ajax({
|
||||
return await $.ajax({
|
||||
url: window.glob.baseApiUrl + url,
|
||||
headers: await getHeaders(componentId ? {
|
||||
"trilium-component-id": componentId
|
||||
@@ -104,18 +104,6 @@ async function upload(url: string, fileToUpload: File, componentId?: string, met
|
||||
contentType: false, // NEEDED, DON'T REMOVE THIS
|
||||
processData: false // NEEDED, DON'T REMOVE THIS
|
||||
});
|
||||
|
||||
try {
|
||||
return await doUpload();
|
||||
} catch (e: unknown) {
|
||||
// jQuery rejects with the jqXHR object
|
||||
const jqXhr = e as JQuery.jqXHR;
|
||||
if (jqXhr?.status && isCsrfError(jqXhr.status, jqXhr.responseText)) {
|
||||
await refreshCsrfToken();
|
||||
return await doUpload();
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
let idCounter = 1;
|
||||
@@ -124,55 +112,12 @@ const idToRequestMap: Record<string, RequestData> = {};
|
||||
|
||||
let maxKnownEntityChangeId = 0;
|
||||
|
||||
let csrfRefreshInProgress: Promise<void> | null = null;
|
||||
|
||||
/**
|
||||
* Re-fetches /bootstrap to obtain a fresh CSRF token. This is needed when the
|
||||
* server session expires (e.g. mobile tab backgrounded for a long time) and the
|
||||
* existing CSRF token is no longer valid.
|
||||
*
|
||||
* Coalesces concurrent calls so only one bootstrap request is in-flight at a time.
|
||||
*/
|
||||
async function refreshCsrfToken(): Promise<void> {
|
||||
if (csrfRefreshInProgress) {
|
||||
return csrfRefreshInProgress;
|
||||
}
|
||||
|
||||
csrfRefreshInProgress = (async () => {
|
||||
try {
|
||||
const response = await fetch(`./bootstrap${window.location.search}`, { cache: "no-store" });
|
||||
if (response.ok) {
|
||||
const json = await response.json();
|
||||
glob.csrfToken = json.csrfToken;
|
||||
}
|
||||
} finally {
|
||||
csrfRefreshInProgress = null;
|
||||
}
|
||||
})();
|
||||
|
||||
return csrfRefreshInProgress;
|
||||
}
|
||||
|
||||
function isCsrfError(status: number, responseText: string): boolean {
|
||||
if (status !== 403) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
const body = JSON.parse(responseText);
|
||||
return body.message === "Invalid CSRF token";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
interface CallOptions {
|
||||
data?: unknown;
|
||||
silentNotFound?: boolean;
|
||||
silentInternalServerError?: boolean;
|
||||
// If `true`, the value will be returned as a string instead of a JavaScript object if JSON, XMLDocument if XML, etc.
|
||||
raw?: boolean;
|
||||
/** Used internally to prevent infinite retry loops on CSRF refresh. */
|
||||
csrfRetried?: boolean;
|
||||
}
|
||||
|
||||
async function call<T>(method: string, url: string, componentId?: string, options: CallOptions = {}) {
|
||||
@@ -222,7 +167,7 @@ function ajax(url: string, method: string, data: unknown, headers: Headers, opts
|
||||
type: method,
|
||||
headers,
|
||||
timeout: 60000,
|
||||
success: (body, _textStatus, jqXhr) => {
|
||||
success: (body, textStatus, jqXhr) => {
|
||||
const respHeaders: Headers = {};
|
||||
|
||||
jqXhr
|
||||
@@ -247,25 +192,7 @@ function ajax(url: string, method: string, data: unknown, headers: Headers, opts
|
||||
// don't report requests that are rejected by the browser, usually when the user is refreshing or going to a different page.
|
||||
rej("rejected by browser");
|
||||
return;
|
||||
}
|
||||
|
||||
// If the CSRF token is stale (e.g. session expired while tab was backgrounded),
|
||||
// refresh it and retry the request once.
|
||||
if (!opts.csrfRetried && isCsrfError(jqXhr.status, jqXhr.responseText)) {
|
||||
try {
|
||||
await refreshCsrfToken();
|
||||
// Rebuild headers so the fresh glob.csrfToken is picked up
|
||||
const retryHeaders = await getHeaders({ "trilium-component-id": headers["trilium-component-id"] });
|
||||
const retryResult = await ajax(url, method, data, retryHeaders, { ...opts, csrfRetried: true });
|
||||
res(retryResult);
|
||||
return;
|
||||
} catch (retryErr) {
|
||||
rej(retryErr);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.silentNotFound && jqXhr.status === 404) {
|
||||
} else if (opts.silentNotFound && jqXhr.status === 404) {
|
||||
// report nothing
|
||||
} else if (opts.silentInternalServerError && jqXhr.status === 500) {
|
||||
// report nothing
|
||||
|
||||
@@ -1,107 +1,66 @@
|
||||
import "jquery";
|
||||
|
||||
import utils from "./services/utils.js";
|
||||
import ko from "knockout";
|
||||
|
||||
type SetupStep = "sync-in-progress" | "setup-type" | "new-document-in-progress" | "sync-from-desktop" | "sync-from-server";
|
||||
type SetupType = "new-document" | "sync-from-desktop" | "sync-from-server" | "";
|
||||
// TriliumNextTODO: properly make use of below types
|
||||
// type SetupModelSetupType = "new-document" | "sync-from-desktop" | "sync-from-server" | "";
|
||||
// type SetupModelStep = "sync-in-progress" | "setup-type" | "new-document-in-progress" | "sync-from-desktop";
|
||||
|
||||
class SetupController {
|
||||
private step: SetupStep;
|
||||
private setupType: SetupType = "";
|
||||
private syncPollIntervalId: number | null = null;
|
||||
private rootNode: HTMLElement;
|
||||
private setupTypeForm: HTMLFormElement;
|
||||
private syncFromServerForm: HTMLFormElement;
|
||||
private setupTypeNextButton: HTMLButtonElement;
|
||||
private setupTypeInputs: HTMLInputElement[];
|
||||
private syncServerHostInput: HTMLInputElement;
|
||||
private syncProxyInput: HTMLInputElement;
|
||||
private passwordInput: HTMLInputElement;
|
||||
private sections: Record<SetupStep, HTMLElement>;
|
||||
class SetupModel {
|
||||
syncInProgress: boolean;
|
||||
step: ko.Observable<string>;
|
||||
setupType: ko.Observable<string>;
|
||||
setupNewDocument: ko.Observable<boolean>;
|
||||
setupSyncFromDesktop: ko.Observable<boolean>;
|
||||
setupSyncFromServer: ko.Observable<boolean>;
|
||||
syncServerHost: ko.Observable<string | undefined>;
|
||||
syncProxy: ko.Observable<string | undefined>;
|
||||
password: ko.Observable<string | undefined>;
|
||||
|
||||
constructor(rootNode: HTMLElement, syncInProgress: boolean) {
|
||||
this.rootNode = rootNode;
|
||||
this.step = syncInProgress ? "sync-in-progress" : "setup-type";
|
||||
this.setupTypeForm = mustGetElement("setup-type-form", HTMLFormElement);
|
||||
this.syncFromServerForm = mustGetElement("sync-from-server-form", HTMLFormElement);
|
||||
this.setupTypeNextButton = mustGetElement("setup-type-next", HTMLButtonElement);
|
||||
this.setupTypeInputs = Array.from(document.querySelectorAll<HTMLInputElement>("input[name='setup-type']"));
|
||||
this.syncServerHostInput = mustGetElement("sync-server-host", HTMLInputElement);
|
||||
this.syncProxyInput = mustGetElement("sync-proxy", HTMLInputElement);
|
||||
this.passwordInput = mustGetElement("password", HTMLInputElement);
|
||||
this.sections = {
|
||||
"setup-type": mustGetElement("setup-type-section", HTMLElement),
|
||||
"new-document-in-progress": mustGetElement("new-document-in-progress-section", HTMLElement),
|
||||
"sync-from-desktop": mustGetElement("sync-from-desktop-section", HTMLElement),
|
||||
"sync-from-server": mustGetElement("sync-from-server-section", HTMLElement),
|
||||
"sync-in-progress": mustGetElement("sync-in-progress-section", HTMLElement)
|
||||
};
|
||||
}
|
||||
constructor(syncInProgress: boolean) {
|
||||
this.syncInProgress = syncInProgress;
|
||||
this.step = ko.observable(syncInProgress ? "sync-in-progress" : "setup-type");
|
||||
this.setupType = ko.observable("");
|
||||
this.setupNewDocument = ko.observable(false);
|
||||
this.setupSyncFromDesktop = ko.observable(false);
|
||||
this.setupSyncFromServer = ko.observable(false);
|
||||
this.syncServerHost = ko.observable();
|
||||
this.syncProxy = ko.observable();
|
||||
this.password = ko.observable();
|
||||
|
||||
init() {
|
||||
this.setupTypeForm.addEventListener("submit", (event) => {
|
||||
event.preventDefault();
|
||||
void this.selectSetupType();
|
||||
});
|
||||
|
||||
this.syncFromServerForm.addEventListener("submit", (event) => {
|
||||
event.preventDefault();
|
||||
void this.finish();
|
||||
});
|
||||
|
||||
for (const input of this.setupTypeInputs) {
|
||||
input.addEventListener("change", () => {
|
||||
this.setupType = input.value as SetupType;
|
||||
this.render();
|
||||
});
|
||||
if (this.syncInProgress) {
|
||||
setInterval(checkOutstandingSyncs, 1000);
|
||||
}
|
||||
|
||||
for (const backButton of document.querySelectorAll<HTMLElement>("[data-action='back']")) {
|
||||
backButton.addEventListener("click", () => {
|
||||
this.back();
|
||||
});
|
||||
}
|
||||
|
||||
const serverAddress = `${location.protocol}//${location.host}`;
|
||||
$("#current-host").html(serverAddress);
|
||||
|
||||
if (this.step === "sync-in-progress") {
|
||||
this.startSyncPolling();
|
||||
}
|
||||
|
||||
this.render();
|
||||
this.rootNode.style.display = "";
|
||||
}
|
||||
|
||||
private async selectSetupType() {
|
||||
if (this.setupType === "new-document") {
|
||||
this.setStep("new-document-in-progress");
|
||||
// this is called in setup.ejs
|
||||
setupTypeSelected() {
|
||||
return !!this.setupType();
|
||||
}
|
||||
|
||||
await $.post("api/setup/new-document");
|
||||
window.location.replace("./setup");
|
||||
return;
|
||||
}
|
||||
selectSetupType() {
|
||||
if (this.setupType() === "new-document") {
|
||||
this.step("new-document-in-progress");
|
||||
|
||||
if (this.setupType) {
|
||||
this.setStep(this.setupType);
|
||||
$.post("api/setup/new-document").then(() => {
|
||||
window.location.replace("./setup");
|
||||
});
|
||||
} else {
|
||||
this.step(this.setupType());
|
||||
}
|
||||
}
|
||||
|
||||
private back() {
|
||||
this.setStep("setup-type");
|
||||
this.setupType = "";
|
||||
|
||||
for (const input of this.setupTypeInputs) {
|
||||
input.checked = false;
|
||||
}
|
||||
|
||||
this.render();
|
||||
back() {
|
||||
this.step("setup-type");
|
||||
this.setupType("");
|
||||
}
|
||||
|
||||
private async finish() {
|
||||
const syncServerHost = this.syncServerHostInput.value.trim();
|
||||
const syncProxy = this.syncProxyInput.value.trim();
|
||||
const password = this.passwordInput.value;
|
||||
async finish() {
|
||||
const syncServerHost = this.syncServerHost();
|
||||
const syncProxy = this.syncProxy();
|
||||
const password = this.password();
|
||||
|
||||
if (!syncServerHost) {
|
||||
showAlert("Trilium server address can't be empty");
|
||||
@@ -115,44 +74,21 @@ class SetupController {
|
||||
|
||||
// not using server.js because it loads too many dependencies
|
||||
const resp = await $.post("api/setup/sync-from-server", {
|
||||
syncServerHost,
|
||||
syncProxy,
|
||||
password
|
||||
syncServerHost: syncServerHost,
|
||||
syncProxy: syncProxy,
|
||||
password: password
|
||||
});
|
||||
|
||||
if (resp.result === "success") {
|
||||
this.step("sync-in-progress");
|
||||
|
||||
setInterval(checkOutstandingSyncs, 1000);
|
||||
|
||||
hideAlert();
|
||||
this.setStep("sync-in-progress");
|
||||
this.startSyncPolling();
|
||||
} else {
|
||||
showAlert(`Sync setup failed: ${resp.error}`);
|
||||
}
|
||||
}
|
||||
|
||||
private setStep(step: SetupStep) {
|
||||
this.step = step;
|
||||
this.render();
|
||||
}
|
||||
|
||||
private render() {
|
||||
for (const [step, section] of Object.entries(this.sections) as [SetupStep, HTMLElement][]) {
|
||||
section.style.display = step === this.step ? "" : "none";
|
||||
}
|
||||
|
||||
this.setupTypeNextButton.disabled = !this.setupType;
|
||||
}
|
||||
|
||||
private getSelectedSetupType(): SetupType {
|
||||
return (this.setupTypeInputs.find((input) => input.checked)?.value ?? "") as SetupType;
|
||||
}
|
||||
|
||||
private startSyncPolling() {
|
||||
if (this.syncPollIntervalId !== null) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.syncPollIntervalId = window.setInterval(checkOutstandingSyncs, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
async function checkOutstandingSyncs() {
|
||||
@@ -186,19 +122,7 @@ function getSyncInProgress() {
|
||||
return !!parseInt(el.content);
|
||||
}
|
||||
|
||||
function mustGetElement<T extends typeof HTMLElement>(id: string, ctor: T): InstanceType<T> {
|
||||
const element = document.getElementById(id);
|
||||
|
||||
if (!element || !(element instanceof ctor)) {
|
||||
throw new Error(`Expected element #${id}`);
|
||||
}
|
||||
|
||||
return element as InstanceType<T>;
|
||||
}
|
||||
|
||||
addEventListener("DOMContentLoaded", (event) => {
|
||||
const rootNode = document.getElementById("setup-dialog");
|
||||
if (!rootNode || !(rootNode instanceof HTMLElement)) return;
|
||||
|
||||
new SetupController(rootNode, getSyncInProgress()).init();
|
||||
ko.applyBindings(new SetupModel(getSyncInProgress()), document.getElementById("setup-dialog"));
|
||||
$("#setup-dialog").show();
|
||||
});
|
||||
|
||||
@@ -1612,7 +1612,11 @@ body:not(.mobile) #launcher-pane.horizontal .dropdown-submenu > .dropdown-menu {
|
||||
}
|
||||
|
||||
body.mobile #launcher-container {
|
||||
justify-content: space-evenly;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
body.mobile #launcher-container button {
|
||||
margin: 0 16px;
|
||||
}
|
||||
|
||||
body.mobile .modal.show {
|
||||
|
||||
@@ -675,11 +675,10 @@ li.dropdown-item a.dropdown-item-button:focus-visible {
|
||||
div.alert {
|
||||
margin-bottom: 8px;
|
||||
background: var(--alert-bar-background) !important;
|
||||
color: var(--main-text-color);
|
||||
border-radius: 8px;
|
||||
font-size: .85em;
|
||||
}
|
||||
|
||||
div.alert p + p {
|
||||
margin-block: 1em 0;
|
||||
}
|
||||
}
|
||||
@@ -1069,6 +1069,7 @@
|
||||
"rename_note": "اعادة تسمية الملاحظة",
|
||||
"remove_relation": "حذف العلاقة",
|
||||
"default_new_note_title": "ملاحظة جديدة",
|
||||
"open_in_new_tab": "فتح في تبويب جديد",
|
||||
"enter_new_title": "ادخل عنوان ملاحظة جديدة:",
|
||||
"note_not_found": "الملاحظة {{noteId}} غير موجودة!",
|
||||
"cannot_match_transform": "تعذر مطابقة التحويل: {{transform}}"
|
||||
|
||||
@@ -1047,6 +1047,7 @@
|
||||
"unprotecting-title": "解除保护状态"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "在新标签页中打开",
|
||||
"remove_note": "删除笔记",
|
||||
"edit_title": "编辑标题",
|
||||
"rename_note": "重命名笔记",
|
||||
|
||||
@@ -1046,6 +1046,7 @@
|
||||
"unprotecting-title": "Ungeschützt-Status"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "In neuem Tab öffnen",
|
||||
"remove_note": "Notiz entfernen",
|
||||
"edit_title": "Titel bearbeiten",
|
||||
"rename_note": "Notiz umbenennen",
|
||||
@@ -2182,52 +2183,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "Mehr erfahren"
|
||||
},
|
||||
"media": {
|
||||
"play": "Abspielen (Arbeitsbereich)",
|
||||
"pause": "Pausieren (Arbeitsbereich)",
|
||||
"back-10s": "10 s zurück (Linke Pfeiltaste)",
|
||||
"forward-30s": "30 s vorwärts",
|
||||
"mute": "Stumm (M)",
|
||||
"unmute": "Stummschaltung aufheben (M)",
|
||||
"playback-speed": "Wiedergabegeschwindigkeit",
|
||||
"loop": "Schleife",
|
||||
"disable-loop": "Schleife deaktivieren",
|
||||
"rotate": "Rotieren",
|
||||
"picture-in-picture": "Bild-in-Bild",
|
||||
"exit-picture-in-picture": "Bild-in-Bild verlassen",
|
||||
"fullscreen": "Vollbild (F)",
|
||||
"exit-fullscreen": "Vollbild verlassen",
|
||||
"unsupported-format": "Medienvorschau ist für dieses Format nicht verfügbar:\n{{mime}}",
|
||||
"zoom-to-fit": "Zoomen um auszufüllen",
|
||||
"zoom-reset": "Zoomen um auszufüllen zurücksetzen"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "Geben den Inhalt des Mermaid-Diagramms ein oder verwenden eine der folgenden Beispieldiagramme.",
|
||||
"sample_diagrams": "Beispieldiagramme:",
|
||||
"sample_flowchart": "Flussdiagramm",
|
||||
"sample_class": "Klasse",
|
||||
"sample_sequence": "Abfolge",
|
||||
"sample_entity_relationship": "Entität Beziehung",
|
||||
"sample_state": "Zustandsübergangsdiagramm",
|
||||
"sample_mindmap": "Mindmap",
|
||||
"sample_architecture": "Architektur",
|
||||
"sample_block": "Block",
|
||||
"sample_c4": "C4",
|
||||
"sample_gantt": "Gantt",
|
||||
"sample_git": "GitGraph",
|
||||
"sample_kanban": "Kanban",
|
||||
"sample_packet": "Paket",
|
||||
"sample_pie": "Kuchen",
|
||||
"sample_quadrant": "Quadrant",
|
||||
"sample_radar": "Radar",
|
||||
"sample_requirement": "Anforderung",
|
||||
"sample_sankey": "Sankey",
|
||||
"sample_timeline": "Zeitstrahl",
|
||||
"sample_treemap": "Kachel",
|
||||
"sample_user_journey": "Benutzererfahrung",
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Mengen",
|
||||
"sample_ishikawa": "Ursache-Wirkung"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"about": {
|
||||
"title": "Σχετικά με το Trilium Notes",
|
||||
"title": "Πληροφορίες για το Trilium Notes",
|
||||
"homepage": "Αρχική Σελίδα:",
|
||||
"app_version": "Έκδοση εφαρμογής:",
|
||||
"db_version": "Έκδοση βάσης δεδομένων:",
|
||||
|
||||
@@ -343,7 +343,6 @@
|
||||
"label_type_title": "Type of the label will help Trilium to choose suitable interface to enter the label value.",
|
||||
"label_type": "Type",
|
||||
"text": "Text",
|
||||
"textarea": "Multi-line Text",
|
||||
"number": "Number",
|
||||
"boolean": "Boolean",
|
||||
"date": "Date",
|
||||
@@ -1069,6 +1068,7 @@
|
||||
"unprotecting-title": "Unprotecting status"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Open in new tab",
|
||||
"remove_note": "Remove note",
|
||||
"edit_title": "Edit title",
|
||||
"rename_note": "Rename note",
|
||||
@@ -1292,6 +1292,10 @@
|
||||
"erase_excess_revision_snapshots": "Erase excess revision snapshots now",
|
||||
"erase_excess_revision_snapshots_prompt": "Excess revision snapshots have been erased."
|
||||
},
|
||||
"search": {
|
||||
"title": "Search",
|
||||
"enable_fuzzy_matching": "Enable fuzzy matching in search (matches similar words when exact matches are insufficient)"
|
||||
},
|
||||
"search_engine": {
|
||||
"title": "Search Engine",
|
||||
"custom_search_engine_info": "Custom search engine requires both a name and a URL to be set. If either of these is not set, DuckDuckGo will be used as the default search engine.",
|
||||
@@ -2202,33 +2206,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "Learn more"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "Type the content of your Mermaid diagram or use one of the sample diagrams below.",
|
||||
"sample_diagrams": "Sample diagrams:",
|
||||
"sample_flowchart": "Flowchart",
|
||||
"sample_class": "Class",
|
||||
"sample_sequence": "Sequence",
|
||||
"sample_entity_relationship": "Entity Relationship",
|
||||
"sample_state": "State",
|
||||
"sample_mindmap": "Mindmap",
|
||||
"sample_architecture": "Architecture",
|
||||
"sample_block": "Block",
|
||||
"sample_c4": "C4",
|
||||
"sample_gantt": "Gantt",
|
||||
"sample_git": "Git",
|
||||
"sample_kanban": "Kanban",
|
||||
"sample_packet": "Packet",
|
||||
"sample_pie": "Pie",
|
||||
"sample_quadrant": "Quadrant",
|
||||
"sample_radar": "Radar",
|
||||
"sample_requirement": "Requirement",
|
||||
"sample_sankey": "Sankey",
|
||||
"sample_timeline": "Timeline",
|
||||
"sample_treemap": "Treemap",
|
||||
"sample_user_journey": "User Journey",
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Venn",
|
||||
"sample_ishikawa": "Ishikawa"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1051,6 +1051,7 @@
|
||||
"unprotecting-title": "Estado de desprotección"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Abrir en nueva pestaña",
|
||||
"remove_note": "Quitar nota",
|
||||
"edit_title": "Editar título",
|
||||
"rename_note": "Cambiar nombre de nota",
|
||||
@@ -2197,52 +2198,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "Para saber más"
|
||||
},
|
||||
"media": {
|
||||
"play": "Reproducir (Espacio)",
|
||||
"pause": "Pausa (Espacio)",
|
||||
"back-10s": "Retroceder 10s (tecla de flecha izquierda)",
|
||||
"forward-30s": "Adelantar 30s",
|
||||
"mute": "Silenciar (M)",
|
||||
"unmute": "Activar sonido (M)",
|
||||
"playback-speed": "Velocidad de reproducción",
|
||||
"loop": "Bucle",
|
||||
"disable-loop": "Deshabilitar bucle",
|
||||
"rotate": "Rotar",
|
||||
"picture-in-picture": "Imagen en imagen",
|
||||
"exit-picture-in-picture": "Salir del modo imagen en imagen",
|
||||
"fullscreen": "Pantalla completa (F)",
|
||||
"exit-fullscreen": "Salir de la pantalla completa",
|
||||
"unsupported-format": "La vista previa del medio no está disponible para este formato de archivo:\n{{mime}}",
|
||||
"zoom-to-fit": "Acercamiento para llenar",
|
||||
"zoom-reset": "Reiniciar acercamiento para llenar"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "Ingrese el contenido de su diagrama Mermaid o utilice uno de los diagramas de muestra a continuación.",
|
||||
"sample_diagrams": "Diagramas de muestra:",
|
||||
"sample_flowchart": "Diagrama de flujo",
|
||||
"sample_class": "Clase",
|
||||
"sample_sequence": "Secuencia",
|
||||
"sample_entity_relationship": "Relación entre entidades",
|
||||
"sample_state": "Estado",
|
||||
"sample_mindmap": "Mapa mental",
|
||||
"sample_architecture": "Arquitectura",
|
||||
"sample_block": "Bloque",
|
||||
"sample_c4": "C4",
|
||||
"sample_gantt": "Gantt",
|
||||
"sample_git": "Git",
|
||||
"sample_kanban": "Kanban",
|
||||
"sample_packet": "Paquete",
|
||||
"sample_pie": "Pastel",
|
||||
"sample_quadrant": "Cuadrante",
|
||||
"sample_radar": "Radar",
|
||||
"sample_requirement": "Requerimiento",
|
||||
"sample_sankey": "Sankey",
|
||||
"sample_timeline": "Línea de tiempo",
|
||||
"sample_user_journey": "Jornada de usuario",
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Venn",
|
||||
"sample_ishikawa": "Ishikawa",
|
||||
"sample_treemap": "Mapa de árbol"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1036,6 +1036,7 @@
|
||||
"unprotecting-title": "Statut de la non-protection"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Ouvrir dans un nouvel onglet",
|
||||
"remove_note": "Supprimer la note",
|
||||
"edit_title": "Modifier le titre",
|
||||
"rename_note": "Renommer la note",
|
||||
|
||||
@@ -1055,6 +1055,7 @@
|
||||
"unprotecting-title": "Stádas díchosanta"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Oscail i gcluaisín nua",
|
||||
"remove_note": "Bain nóta",
|
||||
"edit_title": "Cuir an teideal in eagar",
|
||||
"rename_note": "Athainmnigh an nóta",
|
||||
@@ -2227,52 +2228,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "Foghlaim níos mó"
|
||||
},
|
||||
"media": {
|
||||
"play": "Seinn (Spás)",
|
||||
"pause": "Sos (Spás)",
|
||||
"back-10s": "10 soicind ar ais (eochair saighead chlé)",
|
||||
"forward-30s": "Ar aghaidh 30s",
|
||||
"mute": "Balbhaigh (M)",
|
||||
"unmute": "Díbhalbhaigh (M)",
|
||||
"playback-speed": "Luas athsheinm",
|
||||
"loop": "Lúb",
|
||||
"disable-loop": "Díchumasaigh an lúb",
|
||||
"rotate": "Rothlaigh",
|
||||
"picture-in-picture": "Pictiúr i bpictiúr",
|
||||
"exit-picture-in-picture": "Scoir pictiúr-i-bpictiúr",
|
||||
"fullscreen": "Lánscáileán (F)",
|
||||
"exit-fullscreen": "Scoir lánscáileáin",
|
||||
"unsupported-format": "Níl réamhamharc meán ar fáil don fhormáid comhaid seo:\n{{mime}}",
|
||||
"zoom-to-fit": "Zúmáil chun líonadh",
|
||||
"zoom-reset": "Athshocraigh súmáil chun líonadh"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "Clóscríobh ábhar do léaráid Maighdean Mhara nó bain úsáid as ceann de na léaráidí samplacha thíos.",
|
||||
"sample_diagrams": "Léaráidí samplacha:",
|
||||
"sample_flowchart": "Cairt Sreabhadh",
|
||||
"sample_class": "Rang",
|
||||
"sample_sequence": "Seicheamh",
|
||||
"sample_entity_relationship": "Gaol Eintitis",
|
||||
"sample_state": "Stát",
|
||||
"sample_mindmap": "Léarscáil intinne",
|
||||
"sample_architecture": "Ailtireacht",
|
||||
"sample_block": "Bloc",
|
||||
"sample_c4": "C4",
|
||||
"sample_gantt": "Gantt",
|
||||
"sample_git": "Git",
|
||||
"sample_kanban": "Kanban",
|
||||
"sample_packet": "Paicéad",
|
||||
"sample_pie": "Pióg",
|
||||
"sample_quadrant": "Ceathrú",
|
||||
"sample_radar": "Radar",
|
||||
"sample_requirement": "Riachtanas",
|
||||
"sample_sankey": "Sankey",
|
||||
"sample_timeline": "Amlíne",
|
||||
"sample_treemap": "Léarscáil Crann",
|
||||
"sample_user_journey": "Turas Úsáideora",
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Venn",
|
||||
"sample_ishikawa": "Ishikawa"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1049,6 +1049,7 @@
|
||||
"unprotecting-title": "अन-प्रोटेक्ट स्टेटस"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "नए टैब में खोलें",
|
||||
"remove_note": "नोट हटाएं",
|
||||
"edit_title": "टाइटल एडिट करें",
|
||||
"rename_note": "नोट का नाम बदलें",
|
||||
|
||||
@@ -1424,6 +1424,7 @@
|
||||
"unprotecting-title": "Stato non protetto"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Apri in una nuova scheda",
|
||||
"remove_note": "Rimuovi nota",
|
||||
"edit_title": "Modifica titolo",
|
||||
"rename_note": "Rinomina nota",
|
||||
|
||||
@@ -588,7 +588,7 @@
|
||||
"note-map": "ノートマップ",
|
||||
"render-note": "レンダリングノート",
|
||||
"book": "コレクション",
|
||||
"mermaid-diagram": "マーメイド図",
|
||||
"mermaid-diagram": "Mermaidダイアグラム",
|
||||
"canvas": "キャンバス",
|
||||
"web-view": "Web ビュー",
|
||||
"mind-map": "マインドマップ",
|
||||
@@ -1180,8 +1180,7 @@
|
||||
"is_owned_by_note": "ノートによって所有されています",
|
||||
"and_more": "...その他 {{count}} 件。",
|
||||
"print_landscape": "PDF にエクスポートするときに、ページの向きを縦向きではなく横向きに変更します。",
|
||||
"print_page_size": "PDF にエクスポートするときに、ページのサイズを変更します。サポートされる値: <code>A0</code>, <code>A1</code>, <code>A2</code>, <code>A3</code>, <code>A4</code>, <code>A5</code>, <code>A6</code>, <code>Legal</code>, <code>Letter</code>, <code>Tabloid</code>, <code>Ledger</code>。",
|
||||
"textarea": "複数行テキスト"
|
||||
"print_page_size": "PDF にエクスポートするときに、ページのサイズを変更します。サポートされる値: <code>A0</code>, <code>A1</code>, <code>A2</code>, <code>A3</code>, <code>A4</code>, <code>A5</code>, <code>A6</code>, <code>Legal</code>, <code>Letter</code>, <code>Tabloid</code>, <code>Ledger</code>。"
|
||||
},
|
||||
"link_context_menu": {
|
||||
"open_note_in_popup": "クイック編集",
|
||||
@@ -1538,6 +1537,7 @@
|
||||
"url_placeholder": "http://web サイト..."
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "新しいタブで開く",
|
||||
"remove_note": "ノートを削除",
|
||||
"edit_title": "タイトルを編集",
|
||||
"rename_note": "ノート名を変更",
|
||||
@@ -2168,52 +2168,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "さらに詳しく"
|
||||
},
|
||||
"media": {
|
||||
"play": "再生 (スペース)",
|
||||
"pause": "一時停止 (スペース)",
|
||||
"back-10s": "10 秒戻る (左矢印キー)",
|
||||
"forward-30s": "30 秒進む",
|
||||
"mute": "ミュート (M)",
|
||||
"unmute": "ミュート解除 (M)",
|
||||
"playback-speed": "再生速度",
|
||||
"loop": "ループ",
|
||||
"disable-loop": "ループを解除",
|
||||
"rotate": "回転",
|
||||
"picture-in-picture": "ピクチャーインピクチャー",
|
||||
"exit-picture-in-picture": "ピクチャーインピクチャーを終了",
|
||||
"fullscreen": "全画面表示 (F)",
|
||||
"exit-fullscreen": "全画面表示を終了",
|
||||
"unsupported-format": "このファイル形式ではメディアプレビューはご利用いただけません:\n{{mime}}",
|
||||
"zoom-to-fit": "ズームして全体を表示",
|
||||
"zoom-reset": "ズーム設定をリセット"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "マーメイド図の内容を入力するか、以下のサンプル図のいずれかを使用してください。",
|
||||
"sample_diagrams": "サンプル図:",
|
||||
"sample_flowchart": "フローチャート",
|
||||
"sample_class": "クラス図",
|
||||
"sample_sequence": "シーケンス図",
|
||||
"sample_entity_relationship": "ER 図",
|
||||
"sample_state": "状態遷移図",
|
||||
"sample_mindmap": "マインドマップ",
|
||||
"sample_architecture": "アーキテクチャ図",
|
||||
"sample_block": "ブロック図",
|
||||
"sample_c4": "C4 図",
|
||||
"sample_gantt": "ガントチャート",
|
||||
"sample_git": "Git グラフ",
|
||||
"sample_kanban": "カンバン",
|
||||
"sample_packet": "パケット図",
|
||||
"sample_pie": "円グラフ",
|
||||
"sample_quadrant": "4象限図",
|
||||
"sample_radar": "レーダーチャート",
|
||||
"sample_requirement": "要件図",
|
||||
"sample_sankey": "サンキー図",
|
||||
"sample_timeline": "タイムライン",
|
||||
"sample_treemap": "ツリーマップ",
|
||||
"sample_user_journey": "ユーザージャーニー図",
|
||||
"sample_xy": "XY チャート",
|
||||
"sample_venn": "ベン図",
|
||||
"sample_ishikawa": "石川図"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
"branch_prefix_saved": "브랜치 접두사가 저장되었습니다.",
|
||||
"edit_branch_prefix_multiple": "{{count}}개의 지점 접두사 편집",
|
||||
"branch_prefix_saved_multiple": "{{count}}개의 지점에 대해 지점 접두사가 저장되었습니다.",
|
||||
"affected_branches": "영향을 받은 분기 수({{count}}):"
|
||||
"affected_branches": "영향을 받는 브랜치 수 ({{count}}):"
|
||||
},
|
||||
"bulk_actions": {
|
||||
"bulk_actions": "대량 작업",
|
||||
@@ -134,27 +134,6 @@
|
||||
"notSet": "미설정",
|
||||
"goBackForwards": "히스토리에서 뒤로/앞으로 이동",
|
||||
"showJumpToNoteDialog": "<a class=\"external\" href=\"https://triliumnext.github.io/Docs/Wiki/note-navigation.html#jump-to-note\">\"노트로 이동\" 대화 상자</a> 표시",
|
||||
"scrollToActiveNote": "활성화된 노트로 스크롤 이동",
|
||||
"collapseWholeTree": "모든 노트 트리를 접기",
|
||||
"collapseSubTree": "하위 트리 접기",
|
||||
"tabShortcuts": "탭 단축키",
|
||||
"onlyInDesktop": "데스크톱에서만(일렉트론 빌드)",
|
||||
"openEmptyTab": "빈 탭 열기",
|
||||
"closeActiveTab": "활성 탭 닫기",
|
||||
"jumpToParentNote": "부모 노트로 이동하기",
|
||||
"activateNextTab": "다음 탭 활성화",
|
||||
"activatePreviousTab": "이전 탭 활성화",
|
||||
"creatingNotes": "노트 만들기",
|
||||
"createNoteInto": "활성 노트에 새로운 하위 노트 추가",
|
||||
"movingCloningNotes": "노트 이동/복제",
|
||||
"moveNoteUpDown": "노트 목록에서 노트 위/아래 이동",
|
||||
"selectAllNotes": "현재 레벨의 모든 노트 선택",
|
||||
"selectNote": "노트 선택",
|
||||
"deleteNotes": "노트/하위트리 삭제",
|
||||
"editingNotes": "노트 편집",
|
||||
"createEditLink": "외부 링크 생성/편집",
|
||||
"createInternalLink": "내부 링크 생성",
|
||||
"followLink": "커서아래 링크 따라가기",
|
||||
"insertDateTime": "커서위치에 현재 날짜와 시간 삽입"
|
||||
"scrollToActiveNote": "활성화된 노트로 스크롤 이동"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"widget-render-error": {
|
||||
"title": "Nie udało się wyrenderować niestandardowego widżetu React"
|
||||
},
|
||||
"widget-missing-parent": "Niestandardowy widżet nie ma zdefiniowanej obowiązkowej właściwości „{{property}}”.\n\nJeśli skrypt ma działać bez interfejsu użytkownika (UI) wyłącz go: '#run=frontendStartup'.",
|
||||
"widget-missing-parent": "Niestandardowy widżet nie ma zdefiniowanej obowiązkowej właściwości „{{property}}”.\nJeśli skrypt ma działać bez interfejsu użytkownika (UI) wyłącz go: '#run=frontendStartup'.",
|
||||
"open-script-note": "Otwórz notatkę ze skryptem",
|
||||
"scripting-error": "Błąd skryptu użytkownika: {{title}}"
|
||||
},
|
||||
@@ -1275,6 +1275,7 @@
|
||||
"unprotecting-title": "Status zdejmowania ochrony"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Otwórz w nowej karcie",
|
||||
"remove_note": "Usuń notatkę",
|
||||
"edit_title": "Edytuj tytuł",
|
||||
"rename_note": "Zmień nazwę notatki",
|
||||
@@ -1486,7 +1487,7 @@
|
||||
"custom_name_label": "Nazwa niestandardowej wyszukiwarki",
|
||||
"custom_name_placeholder": "Dostosuj nazwę wyszukiwarki",
|
||||
"custom_url_label": "URL niestandardowej wyszukiwarki powinien zawierać {keyword} jako symbol zastępczy dla wyszukiwanej frazy.",
|
||||
"custom_url_placeholder": "Dostosuj url wyszukiwarki",
|
||||
"custom_url_placeholder": "Dostosuj URL wyszukiwarki",
|
||||
"save_button": "Zapisz"
|
||||
},
|
||||
"tray": {
|
||||
@@ -2197,52 +2198,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "Dowiedz się więcej"
|
||||
},
|
||||
"media": {
|
||||
"fullscreen": "Pełny ekran (F)",
|
||||
"mute": "Wycisz (M)",
|
||||
"unmute": "Wyłącz wyciszenie (M)",
|
||||
"exit-fullscreen": "Wyłącz pełny ekran",
|
||||
"loop": "Pętla",
|
||||
"disable-loop": "Wyłącz pętle",
|
||||
"rotate": "Obróć",
|
||||
"picture-in-picture": "Obraz w obrazie",
|
||||
"pause": "Zatrzymaj (Space)",
|
||||
"back-10s": "Cofnij 10s (Lewa strzałka)",
|
||||
"forward-30s": "Do przodu 30s",
|
||||
"playback-speed": "Szybkość odtwarzania",
|
||||
"exit-picture-in-picture": "Wyjdź z obrazu w obrazie",
|
||||
"zoom-to-fit": "Powiększ aby wypełnić",
|
||||
"unsupported-format": "Podgląd multimediów nie jest dostępny dla tego formatu pliku\n{{mime}}",
|
||||
"play": "Odtwórz (Space)",
|
||||
"zoom-reset": "Zresetuj powiększenie"
|
||||
},
|
||||
"mermaid": {
|
||||
"sample_architecture": "Architektura",
|
||||
"sample_diagrams": "Przykład diagramu:",
|
||||
"sample_flowchart": "Schemat blokowy",
|
||||
"sample_class": "Klasa",
|
||||
"sample_sequence": "Sekwencja",
|
||||
"sample_timeline": "Oś czasu",
|
||||
"sample_treemap": "Mapa drzewa",
|
||||
"sample_xy": "XY",
|
||||
"sample_venn": "Diagram Venna",
|
||||
"sample_ishikawa": "Diagram Ishikawa",
|
||||
"placeholder": "Wpisz treść swojego diagramu lub skorzystaj z jednego z przykładowych diagramów poniżej.",
|
||||
"sample_entity_relationship": "Diagram związków encji",
|
||||
"sample_state": "Diagram stanów",
|
||||
"sample_mindmap": "Mapa myśli",
|
||||
"sample_block": "Diagram blokowy",
|
||||
"sample_c4": "C4",
|
||||
"sample_gantt": "Wykres Gantta",
|
||||
"sample_git": "Diagram Git",
|
||||
"sample_kanban": "Kanban",
|
||||
"sample_packet": "Diagram pakietów",
|
||||
"sample_pie": "Wykres kołowy",
|
||||
"sample_quadrant": "Diagram kwadrantowy",
|
||||
"sample_radar": "Wykres radarowy",
|
||||
"sample_requirement": "Diagram wymagań",
|
||||
"sample_sankey": "Wykres Sankeya",
|
||||
"sample_user_journey": "Mapa Podróży Użytkownika"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1047,6 +1047,7 @@
|
||||
"unprotecting-title": "Estado da remoção de proteção"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Abrir em nova guia",
|
||||
"remove_note": "Remover nota",
|
||||
"edit_title": "Editar título",
|
||||
"rename_note": "Renomear nota",
|
||||
|
||||
@@ -1111,6 +1111,7 @@
|
||||
"start_session_button": "Iniciar sessão protegida"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Abrir em nova aba",
|
||||
"remove_note": "Remover nota",
|
||||
"edit_title": "Editar título",
|
||||
"rename_note": "Renomear nota",
|
||||
|
||||
@@ -1054,6 +1054,7 @@
|
||||
"enter_title_of_new_note": "Introduceți titlul noii notițe",
|
||||
"note_already_in_diagram": "Notița „{{title}}” deja se află pe diagramă.",
|
||||
"note_not_found": "Notița „{{noteId}}” nu a putut fi găsită!",
|
||||
"open_in_new_tab": "Deschide într-un tab nou",
|
||||
"remove_note": "Șterge notița",
|
||||
"remove_relation": "Șterge relația",
|
||||
"rename_note": "Redenumește notița",
|
||||
|
||||
@@ -1625,6 +1625,7 @@
|
||||
"rename_note": "Переименовать заметку",
|
||||
"remove_relation": "Удалить отношение",
|
||||
"default_new_note_title": "новая заметка",
|
||||
"open_in_new_tab": "Открыть в новой вкладке",
|
||||
"confirm_remove_relation": "Вы уверены, что хотите удалить связь?",
|
||||
"enter_new_title": "Введите новое название заметки:",
|
||||
"note_not_found": "Заметка {{noteId}} не найдена!",
|
||||
|
||||
@@ -446,8 +446,7 @@
|
||||
"app_theme_base": "設定為 \"next\"、\"next-light \" 或 \"next-dark\",以使用相應的 TriliumNext 主題(自動、淺色或深色)作為自訂主題的基礎,而非傳統主題。",
|
||||
"print_landscape": "匯出為 PDF 時,將頁面方向更改為橫向而非縱向。",
|
||||
"print_page_size": "在匯出 PDF 時更改頁面大小。支援的值:<code>A0</code>、<code>A1</code>、<code>A2</code>、<code>A3</code>、<code>A4</code>、<code>A5</code>、<code>A6</code>、<code>Legal</code>、<code>Letter</code>、<code>Tabloid</code>、<code>Ledger</code>。",
|
||||
"color_type": "顏色",
|
||||
"textarea": "多行文字"
|
||||
"color_type": "顏色"
|
||||
},
|
||||
"attribute_editor": {
|
||||
"help_text_body1": "要新增標籤,只需輸入例如 <code>#rock</code> 或者如果您還想新增值,則例如 <code>#year = 2020</code>",
|
||||
@@ -1047,6 +1046,7 @@
|
||||
"unprotecting-title": "解除保護狀態"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "在新分頁中打開",
|
||||
"remove_note": "刪除筆記",
|
||||
"edit_title": "編輯標題",
|
||||
"rename_note": "重新命名筆記",
|
||||
@@ -2183,52 +2183,5 @@
|
||||
},
|
||||
"setup_form": {
|
||||
"more_info": "了解更多"
|
||||
},
|
||||
"media": {
|
||||
"play": "播放 (空白鍵)",
|
||||
"pause": "暫停 (空白鍵)",
|
||||
"back-10s": "往前 10 秒 (左方向鍵)",
|
||||
"forward-30s": "往後 30 秒",
|
||||
"mute": "靜音 (M)",
|
||||
"unmute": "解除靜音 (M)",
|
||||
"playback-speed": "播放速度",
|
||||
"loop": "循環",
|
||||
"disable-loop": "解除循環",
|
||||
"rotate": "旋轉",
|
||||
"picture-in-picture": "畫中畫",
|
||||
"exit-picture-in-picture": "退出畫中畫",
|
||||
"fullscreen": "全螢幕 (F)",
|
||||
"exit-fullscreen": "退出全螢幕",
|
||||
"unsupported-format": "此檔案格式不支援媒體預覽:\n{{mime}}",
|
||||
"zoom-to-fit": "放大至填滿畫面",
|
||||
"zoom-reset": "重設放大至填滿畫面"
|
||||
},
|
||||
"mermaid": {
|
||||
"placeholder": "請輸入您的美人魚圖表內容,或選用下方其中一個範例圖表。",
|
||||
"sample_diagrams": "範例圖表:",
|
||||
"sample_flowchart": "流程圖",
|
||||
"sample_class": "階層圖",
|
||||
"sample_sequence": "時序圖",
|
||||
"sample_entity_relationship": "實體關係圖",
|
||||
"sample_state": "狀態圖",
|
||||
"sample_mindmap": "心智圖",
|
||||
"sample_architecture": "架構圖",
|
||||
"sample_block": "區塊圖",
|
||||
"sample_c4": "C4 圖",
|
||||
"sample_gantt": "甘特圖",
|
||||
"sample_git": "Git 分支圖",
|
||||
"sample_kanban": "看板圖",
|
||||
"sample_packet": "數據包圖",
|
||||
"sample_pie": "圓餅圖",
|
||||
"sample_quadrant": "象限圖",
|
||||
"sample_radar": "雷達圖",
|
||||
"sample_requirement": "需求圖",
|
||||
"sample_sankey": "桑基圖",
|
||||
"sample_timeline": "時間軸",
|
||||
"sample_treemap": "樹狀圖",
|
||||
"sample_user_journey": "用戶旅程",
|
||||
"sample_xy": "XY 圖表",
|
||||
"sample_venn": "韋恩圖",
|
||||
"sample_ishikawa": "魚骨圖"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1151,6 +1151,7 @@
|
||||
"unprotecting-title": "Статус зняття захисту"
|
||||
},
|
||||
"relation_map": {
|
||||
"open_in_new_tab": "Відкрити в новій вкладці",
|
||||
"remove_note": "Видалити нотатку",
|
||||
"edit_title": "Редагувати заголовок",
|
||||
"rename_note": "Перейменувати нотатку",
|
||||
|
||||
@@ -2,7 +2,7 @@ import "./PromotedAttributes.css";
|
||||
|
||||
import { UpdateAttributeResponse } from "@triliumnext/commons";
|
||||
import clsx from "clsx";
|
||||
import { ComponentChild, createElement, HTMLInputTypeAttribute, InputHTMLAttributes, MouseEventHandler, TargetedEvent, TargetedInputEvent } from "preact";
|
||||
import { ComponentChild, HTMLInputTypeAttribute, InputHTMLAttributes, MouseEventHandler, TargetedEvent, TargetedInputEvent } from "preact";
|
||||
import { Dispatch, StateUpdater, useCallback, useEffect, useRef, useState } from "preact/hooks";
|
||||
|
||||
import NoteContext from "../components/note_context";
|
||||
@@ -36,7 +36,7 @@ interface CellProps {
|
||||
setCellToFocus(cell: Cell): void;
|
||||
}
|
||||
|
||||
type OnChangeEventData = TargetedEvent<HTMLInputElement | HTMLTextAreaElement, Event> | InputEvent | JQuery.TriggeredEvent<HTMLInputElement, undefined, HTMLInputElement, HTMLInputElement>;
|
||||
type OnChangeEventData = TargetedEvent<HTMLInputElement, Event> | InputEvent | JQuery.TriggeredEvent<HTMLInputElement, undefined, HTMLInputElement, HTMLInputElement>;
|
||||
type OnChangeListener = (e: OnChangeEventData) => void | Promise<void>;
|
||||
|
||||
export default function PromotedAttributes() {
|
||||
@@ -171,9 +171,8 @@ function PromotedAttributeCell(props: CellProps) {
|
||||
);
|
||||
}
|
||||
|
||||
const LABEL_MAPPINGS: Record<LabelType, HTMLInputTypeAttribute | undefined> = {
|
||||
const LABEL_MAPPINGS: Record<LabelType, HTMLInputTypeAttribute> = {
|
||||
text: "text",
|
||||
textarea: undefined,
|
||||
number: "number",
|
||||
boolean: "checkbox",
|
||||
date: "date",
|
||||
@@ -227,21 +226,20 @@ function LabelInput(props: CellProps & { inputId: string }) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const inputNode = createElement(definition.labelType === "textarea" ? "textarea" : "input", {
|
||||
className: "form-control promoted-attribute-input",
|
||||
tabIndex: 200 + definitionAttr.position,
|
||||
id: inputId,
|
||||
type: LABEL_MAPPINGS[definition.labelType ?? "text"],
|
||||
value: valueDraft,
|
||||
checked: definition.labelType === "boolean" ? valueAttr.value === "true" : undefined,
|
||||
placeholder: t("promoted_attributes.unset-field-placeholder"),
|
||||
"data-attribute-id": valueAttr.attributeId,
|
||||
"data-attribute-type": valueAttr.type,
|
||||
"data-attribute-name": valueAttr.name,
|
||||
onBlur: onChangeListener,
|
||||
...extraInputProps
|
||||
});
|
||||
const inputNode = <input
|
||||
className="form-control promoted-attribute-input"
|
||||
tabIndex={200 + definitionAttr.position}
|
||||
id={inputId}
|
||||
type={LABEL_MAPPINGS[definition.labelType ?? "text"]}
|
||||
value={valueDraft}
|
||||
checked={definition.labelType === "boolean" ? valueAttr.value === "true" : undefined}
|
||||
placeholder={t("promoted_attributes.unset-field-placeholder")}
|
||||
data-attribute-id={valueAttr.attributeId}
|
||||
data-attribute-type={valueAttr.type}
|
||||
data-attribute-name={valueAttr.name}
|
||||
onBlur={onChangeListener}
|
||||
{...extraInputProps}
|
||||
/>;
|
||||
|
||||
if (definition.labelType === "boolean") {
|
||||
return <>
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import appContext from "../../components/app_context.js";
|
||||
import attributeAutocompleteService from "../../services/attribute_autocomplete.js";
|
||||
import type { Attribute } from "../../services/attribute_parser.js";
|
||||
import { isExperimentalFeatureEnabled } from "../../services/experimental_features.js";
|
||||
import { focusSavedElement, saveFocusedElement } from "../../services/focus.js";
|
||||
import froca from "../../services/froca.js";
|
||||
import { t } from "../../services/i18n.js";
|
||||
import server from "../../services/server.js";
|
||||
import froca from "../../services/froca.js";
|
||||
import linkService from "../../services/link.js";
|
||||
import attributeAutocompleteService from "../../services/attribute_autocomplete.js";
|
||||
import noteAutocompleteService from "../../services/note_autocomplete.js";
|
||||
import promotedAttributeDefinitionParser from "../../services/promoted_attribute_definition_parser.js";
|
||||
import server from "../../services/server.js";
|
||||
import shortcutService from "../../services/shortcuts.js";
|
||||
import NoteContextAwareWidget from "../note_context_aware_widget.js";
|
||||
import SpacedUpdate from "../../services/spaced_update.js";
|
||||
import utils from "../../services/utils.js";
|
||||
import NoteContextAwareWidget from "../note_context_aware_widget.js";
|
||||
import shortcutService from "../../services/shortcuts.js";
|
||||
import appContext from "../../components/app_context.js";
|
||||
import type { Attribute } from "../../services/attribute_parser.js";
|
||||
import { focusSavedElement, saveFocusedElement } from "../../services/focus.js";
|
||||
import { isExperimentalFeatureEnabled } from "../../services/experimental_features.js";
|
||||
|
||||
const TPL = /*html*/`
|
||||
<div class="attr-detail tn-tool-dialog">
|
||||
@@ -29,7 +29,6 @@ const TPL = /*html*/`
|
||||
max-height: 600px;
|
||||
overflow: auto;
|
||||
box-shadow: 10px 10px 93px -25px black;
|
||||
contain: none;
|
||||
}
|
||||
|
||||
.attr-help td {
|
||||
@@ -138,7 +137,6 @@ const TPL = /*html*/`
|
||||
<td>
|
||||
<select class="attr-input-label-type form-control">
|
||||
<option value="text">${t("attribute_detail.text")}</option>
|
||||
<option value="textarea">${t("attribute_detail.textarea")}</option>
|
||||
<option value="number">${t("attribute_detail.number")}</option>
|
||||
<option value="boolean">${t("attribute_detail.boolean")}</option>
|
||||
<option value="date">${t("attribute_detail.date")}</option>
|
||||
@@ -344,7 +342,6 @@ export default class AttributeDetailWidget extends NoteContextAwareWidget {
|
||||
private $relatedNotesList!: JQuery<HTMLElement>;
|
||||
private $relatedNotesMoreNotes!: JQuery<HTMLElement>;
|
||||
private $attrHelp!: JQuery<HTMLElement>;
|
||||
private $statusBar?: JQuery<HTMLElement>;
|
||||
|
||||
private relatedNotesSpacedUpdate!: SpacedUpdate;
|
||||
private attribute!: Attribute;
|
||||
@@ -579,24 +576,17 @@ export default class AttributeDetailWidget extends NoteContextAwareWidget {
|
||||
return;
|
||||
}
|
||||
|
||||
if (isNewLayout) {
|
||||
if (!this.$statusBar) {
|
||||
this.$statusBar = $(document.body).find(".component.status-bar");
|
||||
}
|
||||
this.$widget
|
||||
.css("left", detPosition.left)
|
||||
.css("right", detPosition.right)
|
||||
.css("top", y - offset.top + 70)
|
||||
.css("max-height", outerHeight + y > height - 50 ? height - y - 50 : 10000);
|
||||
|
||||
const statusBarHeight = this.$statusBar.outerHeight() ?? 0;
|
||||
const maxHeight = document.body.clientHeight - statusBarHeight;
|
||||
if (isNewLayout) {
|
||||
this.$widget
|
||||
.css("left", offset.left + (typeof detPosition.left === "number" ? detPosition.left : 0))
|
||||
.css("top", "unset")
|
||||
.css("bottom", statusBarHeight ?? 0)
|
||||
.css("max-height", maxHeight);
|
||||
} else {
|
||||
this.$widget
|
||||
.css("left", detPosition.left)
|
||||
.css("right", detPosition.right)
|
||||
.css("top", y - offset.top + 70)
|
||||
.css("max-height", outerHeight + y > height - 50 ? height - y - 50 : 10000);
|
||||
.css("bottom", 70)
|
||||
.css("max-height", "80vh");
|
||||
}
|
||||
|
||||
if (focus === "name") {
|
||||
@@ -704,14 +694,14 @@ export default class AttributeDetailWidget extends NoteContextAwareWidget {
|
||||
return "label-definition";
|
||||
} else if (attribute.name.startsWith("relation:")) {
|
||||
return "relation-definition";
|
||||
} else {
|
||||
return "label";
|
||||
}
|
||||
return "label";
|
||||
|
||||
} else if (attribute.type === "relation") {
|
||||
return "relation";
|
||||
} else {
|
||||
this.$title.text("");
|
||||
}
|
||||
this.$title.text("");
|
||||
|
||||
}
|
||||
|
||||
updateAttributeInEditor() {
|
||||
|
||||
@@ -364,19 +364,23 @@
|
||||
mask-repeat: no-repeat;
|
||||
mask-size: 100% 100%;
|
||||
}
|
||||
|
||||
|
||||
.ck-content p {
|
||||
margin-bottom: 0.5em;
|
||||
line-height: 1.3;
|
||||
}
|
||||
|
||||
.ck-content figure.image {
|
||||
width: 25%;
|
||||
}
|
||||
|
||||
.ck-content .table {
|
||||
display: flex;
|
||||
flex-direction: column-reverse;
|
||||
overflow-x: scroll;
|
||||
--scrollbar-thickness: 0;
|
||||
scrollbar-width: none;
|
||||
|
||||
|
||||
table {
|
||||
width: max-content;
|
||||
table-layout: auto;
|
||||
@@ -431,4 +435,4 @@
|
||||
}
|
||||
}
|
||||
|
||||
/* #endregion */
|
||||
/* #endregion */
|
||||
@@ -2,8 +2,8 @@ import "./index.css";
|
||||
|
||||
import { RefObject } from "preact";
|
||||
import { useEffect, useLayoutEffect, useRef, useState } from "preact/hooks";
|
||||
import Reveal, { RevealApi } from "reveal.js";
|
||||
import slideBaseStylesheet from "reveal.js/reveal.css?raw";
|
||||
import Reveal from "reveal.js";
|
||||
import slideBaseStylesheet from "reveal.js/dist/reveal.css?raw";
|
||||
|
||||
import { openInCurrentNoteContext } from "../../../components/note_context";
|
||||
import FNote from "../../../entities/fnote";
|
||||
@@ -20,7 +20,7 @@ import { DEFAULT_THEME, loadPresentationTheme } from "./themes";
|
||||
export default function PresentationView({ note, noteIds, media, onReady, onProgressChanged }: ViewModeProps<{}>) {
|
||||
const [ presentation, setPresentation ] = useState<PresentationModel>();
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const [ api, setApi ] = useState<RevealApi>();
|
||||
const [ api, setApi ] = useState<Reveal.Api>();
|
||||
const stylesheets = usePresentationStylesheets(note, media);
|
||||
|
||||
function refresh() {
|
||||
@@ -98,7 +98,7 @@ function usePresentationStylesheets(note: FNote, media: ViewModeMedia) {
|
||||
return stylesheets;
|
||||
}
|
||||
|
||||
function ButtonOverlay({ containerRef, api }: { containerRef: RefObject<HTMLDivElement>, api: RevealApi | undefined }) {
|
||||
function ButtonOverlay({ containerRef, api }: { containerRef: RefObject<HTMLDivElement>, api: Reveal.Api | undefined }) {
|
||||
const [ isOverviewActive, setIsOverviewActive ] = useState(false);
|
||||
useEffect(() => {
|
||||
if (!api) return;
|
||||
@@ -144,9 +144,9 @@ function ButtonOverlay({ containerRef, api }: { containerRef: RefObject<HTMLDivE
|
||||
);
|
||||
}
|
||||
|
||||
function Presentation({ presentation, setApi } : { presentation: PresentationModel, setApi: (api: RevealApi | undefined) => void }) {
|
||||
function Presentation({ presentation, setApi } : { presentation: PresentationModel, setApi: (api: Reveal.Api | undefined) => void }) {
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const [revealApi, setRevealApi] = useState<RevealApi>();
|
||||
const [revealApi, setRevealApi] = useState<Reveal.Api>();
|
||||
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
@@ -222,7 +222,7 @@ function getNoteIdFromSlide(slide: HTMLElement | undefined) {
|
||||
return slide.dataset.noteId;
|
||||
}
|
||||
|
||||
function rewireLinks(container: HTMLElement, api: RevealApi) {
|
||||
function rewireLinks(container: HTMLElement, api: Reveal.Api) {
|
||||
const links = container.querySelectorAll<HTMLLinkElement>("a.reference-link");
|
||||
for (const link of links) {
|
||||
link.addEventListener("click", () => {
|
||||
|
||||
@@ -3,49 +3,49 @@ export const DEFAULT_THEME = "white";
|
||||
const themes = {
|
||||
black: {
|
||||
name: "Black",
|
||||
loadTheme: () => import("reveal.js/theme/black.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/black.css?raw")
|
||||
},
|
||||
white: {
|
||||
name: "White",
|
||||
loadTheme: () => import("reveal.js/theme/white.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/white.css?raw")
|
||||
},
|
||||
beige: {
|
||||
name: "Beige",
|
||||
loadTheme: () => import("reveal.js/theme/beige.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/beige.css?raw")
|
||||
},
|
||||
serif: {
|
||||
name: "Serif",
|
||||
loadTheme: () => import("reveal.js/theme/serif.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/serif.css?raw")
|
||||
},
|
||||
simple: {
|
||||
name: "Simple",
|
||||
loadTheme: () => import("reveal.js/theme/simple.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/simple.css?raw")
|
||||
},
|
||||
solarized: {
|
||||
name: "Solarized",
|
||||
loadTheme: () => import("reveal.js/theme/solarized.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/solarized.css?raw")
|
||||
},
|
||||
moon: {
|
||||
name: "Moon",
|
||||
loadTheme: () => import("reveal.js/theme/moon.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/moon.css?raw")
|
||||
},
|
||||
dracula: {
|
||||
name: "Dracula",
|
||||
loadTheme: () => import("reveal.js/theme/dracula.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/dracula.css?raw")
|
||||
},
|
||||
sky: {
|
||||
name: "Sky",
|
||||
loadTheme: () => import("reveal.js/theme/sky.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/sky.css?raw")
|
||||
},
|
||||
blood: {
|
||||
name: "Blood",
|
||||
loadTheme: () => import("reveal.js/theme/blood.css?raw")
|
||||
loadTheme: () => import("reveal.js/dist/theme/blood.css?raw")
|
||||
}
|
||||
} as const;
|
||||
|
||||
export function getPresentationThemes() {
|
||||
return Object.entries(themes).map(([ id, theme ]) => ({
|
||||
id,
|
||||
id: id,
|
||||
name: theme.name
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -19,13 +19,6 @@ const labelTypeMappings: Record<ColumnType, Partial<ColumnDefinition>> = {
|
||||
text: {
|
||||
editor: "input"
|
||||
},
|
||||
textarea: {
|
||||
editor: "textarea",
|
||||
formatter: "textarea",
|
||||
editorParams: {
|
||||
shiftEnterSubmit: true
|
||||
}
|
||||
},
|
||||
boolean: {
|
||||
formatter: "tickCross",
|
||||
editor: "tickCross"
|
||||
|
||||
@@ -75,9 +75,3 @@
|
||||
font-size: 1.5em;
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
|
||||
.tabulator .tabulator-editable {
|
||||
textarea {
|
||||
padding: 7px !important;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
.note-content-switcher {
|
||||
--badge-radius: 12px;
|
||||
position: relative;
|
||||
display: flex;
|
||||
min-height: 35px;
|
||||
gap: 5px;
|
||||
padding: 5px;
|
||||
flex-wrap: wrap;
|
||||
flex-shrink: 0;
|
||||
font-size: 0.9rem;
|
||||
align-items: center;
|
||||
|
||||
.ext-badge {
|
||||
--color: var(--input-background-color);
|
||||
color: var(--main-text-color);
|
||||
font-size: 1em;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
import "./NoteContentSwitcher.css";
|
||||
|
||||
import FNote from "../../entities/fnote";
|
||||
import server from "../../services/server";
|
||||
import { Badge } from "../react/Badge";
|
||||
import { useNoteSavedData } from "../react/hooks";
|
||||
|
||||
export interface NoteContentTemplate {
|
||||
name: string;
|
||||
content: string;
|
||||
}
|
||||
|
||||
interface NoteContentSwitcherProps {
|
||||
text: string;
|
||||
note: FNote;
|
||||
templates: NoteContentTemplate[];
|
||||
}
|
||||
|
||||
export default function NoteContentSwitcher({ text, note, templates }: NoteContentSwitcherProps) {
|
||||
const blob = useNoteSavedData(note?.noteId);
|
||||
|
||||
return (blob?.trim().length === 0 &&
|
||||
<div className="note-content-switcher">
|
||||
{text}{" "}
|
||||
|
||||
{templates.map(sample => (
|
||||
<Badge
|
||||
key={sample.name}
|
||||
text={sample.name}
|
||||
onClick={async () => {
|
||||
await server.put(`notes/${note.noteId}/data`, {
|
||||
content: sample.content
|
||||
});
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -84,7 +84,7 @@ export const TYPE_MAPPINGS: Record<ExtendedNoteType, NoteTypeMapping> = {
|
||||
printable: true
|
||||
},
|
||||
mermaid: {
|
||||
view: () => import("./type_widgets/mermaid/Mermaid"),
|
||||
view: () => import("./type_widgets/Mermaid"),
|
||||
className: "note-detail-mermaid",
|
||||
printable: true,
|
||||
isFullHeight: true
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { createPortal } from "preact/compat";
|
||||
import { useEffect, useState } from "preact/hooks";
|
||||
|
||||
import FAttribute from "../../entities/fattribute";
|
||||
@@ -75,7 +74,7 @@ export default function InheritedAttributesTab({ note, componentId, emptyListStr
|
||||
)}
|
||||
</div>
|
||||
|
||||
{createPortal(attributeDetailWidgetEl, document.body)}
|
||||
{attributeDetailWidgetEl}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { AttributeEditor as CKEditorAttributeEditor, MentionFeed, ModelElement, ModelNode, ModelPosition } from "@triliumnext/ckeditor5";
|
||||
import { AttributeType } from "@triliumnext/commons";
|
||||
import { createPortal } from "preact/compat";
|
||||
import { MutableRef, useEffect, useImperativeHandle, useMemo, useRef, useState } from "preact/hooks";
|
||||
|
||||
import type { CommandData, FilteredCommandNames } from "../../../components/app_context";
|
||||
@@ -337,8 +336,7 @@ export default function AttributeEditor({ api, note, componentId, notePath, ntxI
|
||||
let matchedAttr: Attribute | null = null;
|
||||
|
||||
for (const attr of parsedAttrs) {
|
||||
if (attr.startIndex !== undefined && clickIndex > attr.startIndex &&
|
||||
attr.endIndex !== undefined && clickIndex <= attr.endIndex) {
|
||||
if (attr.startIndex && clickIndex > attr.startIndex && attr.endIndex && clickIndex <= attr.endIndex) {
|
||||
matchedAttr = attr;
|
||||
break;
|
||||
}
|
||||
@@ -409,7 +407,7 @@ export default function AttributeEditor({ api, note, componentId, notePath, ntxI
|
||||
)}
|
||||
</div>}
|
||||
|
||||
{createPortal(attributeDetailWidgetEl, document.body)}
|
||||
{attributeDetailWidgetEl}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,11 +1,7 @@
|
||||
import { useCallback } from "preact/hooks";
|
||||
|
||||
import { t } from "../../../services/i18n";
|
||||
import { getMermaidConfig, loadElkIfNeeded, postprocessMermaidSvg } from "../../../services/mermaid";
|
||||
import NoteContentSwitcher from "../../layout/NoteContentSwitcher";
|
||||
import SvgSplitEditor from "../helpers/SvgSplitEditor";
|
||||
import { TypeWidgetProps } from "../type_widget";
|
||||
import SAMPLE_DIAGRAMS from "./sample_diagrams";
|
||||
import SvgSplitEditor from "./helpers/SvgSplitEditor";
|
||||
import { TypeWidgetProps } from "./type_widget";
|
||||
import { getMermaidConfig, loadElkIfNeeded, postprocessMermaidSvg } from "../../services/mermaid";
|
||||
|
||||
let idCounter = 1;
|
||||
let registeredErrorReporter = false;
|
||||
@@ -19,10 +15,6 @@ export default function Mermaid(props: TypeWidgetProps) {
|
||||
registeredErrorReporter = true;
|
||||
}
|
||||
|
||||
if (!content.trim()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
mermaid.initialize({
|
||||
startOnLoad: false,
|
||||
...(getMermaidConfig() as any),
|
||||
@@ -38,12 +30,6 @@ export default function Mermaid(props: TypeWidgetProps) {
|
||||
attachmentName="mermaid-export"
|
||||
renderSvg={renderSvg}
|
||||
noteType="mermaid"
|
||||
extraContent={(
|
||||
<NoteContentSwitcher
|
||||
text={t("mermaid.sample_diagrams")}
|
||||
note={props.note}
|
||||
templates={SAMPLE_DIAGRAMS} />
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
@@ -30,7 +30,6 @@ export interface EditableCodeProps extends TypeWidgetProps, Omit<CodeEditorProps
|
||||
onContentChanged?: (content: string) => void;
|
||||
/** Invoked after the content of the note has been uploaded to the server, using a spaced update. */
|
||||
dataSaved?: () => void;
|
||||
placeholder?: string;
|
||||
}
|
||||
|
||||
export function ReadOnlyCode({ note, viewScope, ntxId, parentComponent }: TypeWidgetProps) {
|
||||
@@ -75,7 +74,7 @@ function formatViewSource(note: FNote, content: string) {
|
||||
return content;
|
||||
}
|
||||
|
||||
export function EditableCode({ note, ntxId, noteContext, debounceUpdate, parentComponent, updateInterval, noteType = "code", onContentChanged, dataSaved, placeholder, ...editorProps }: EditableCodeProps) {
|
||||
export function EditableCode({ note, ntxId, noteContext, debounceUpdate, parentComponent, updateInterval, noteType = "code", onContentChanged, dataSaved, ...editorProps }: EditableCodeProps) {
|
||||
const editorRef = useRef<VanillaCodeMirror>(null);
|
||||
const containerRef = useRef<HTMLPreElement>(null);
|
||||
const [ vimKeymapEnabled ] = useTriliumOptionBool("vimKeymapEnabled");
|
||||
@@ -116,7 +115,7 @@ export function EditableCode({ note, ntxId, noteContext, debounceUpdate, parentC
|
||||
editorRef={editorRef} containerRef={containerRef}
|
||||
mime={mime ?? "text/plain"}
|
||||
className="note-detail-code-editor"
|
||||
placeholder={placeholder ?? t("editable_code.placeholder")}
|
||||
placeholder={t("editable_code.placeholder")}
|
||||
vimKeybindings={vimKeymapEnabled}
|
||||
tabIndex={300}
|
||||
onContentChanged={() => {
|
||||
|
||||
@@ -27,18 +27,12 @@
|
||||
margin: 0 !important;
|
||||
}
|
||||
|
||||
body.desktop .note-detail-split .note-detail-code-editor {
|
||||
border-radius: 6px;
|
||||
margin-top: 1px;
|
||||
}
|
||||
|
||||
.note-detail-split .note-detail-error-container {
|
||||
font-family: var(--monospace-font-family);
|
||||
margin: 5px;
|
||||
white-space: pre-wrap;
|
||||
font-size: 0.85em;
|
||||
overflow: auto;
|
||||
user-select: text;
|
||||
}
|
||||
|
||||
.note-detail-split .note-detail-split-preview {
|
||||
|
||||
@@ -19,7 +19,6 @@ export interface SplitEditorProps extends EditableCodeProps {
|
||||
previewButtons?: ComponentChildren;
|
||||
editorBefore?: ComponentChildren;
|
||||
forceOrientation?: "horizontal" | "vertical";
|
||||
extraContent?: ComponentChildren;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -42,7 +41,7 @@ export default function SplitEditor(props: SplitEditorProps) {
|
||||
|
||||
}
|
||||
|
||||
function EditorWithSplit({ note, error, splitOptions, previewContent, previewButtons, className, editorBefore, forceOrientation, extraContent, ...editorProps }: SplitEditorProps) {
|
||||
function EditorWithSplit({ note, error, splitOptions, previewContent, previewButtons, className, editorBefore, forceOrientation, ...editorProps }: SplitEditorProps) {
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const splitEditorOrientation = useSplitOrientation(forceOrientation);
|
||||
|
||||
@@ -58,12 +57,9 @@ function EditorWithSplit({ note, error, splitOptions, previewContent, previewBut
|
||||
{...editorProps}
|
||||
/>
|
||||
</div>
|
||||
{error && (
|
||||
<Admonition type="caution" className="note-detail-error-container">
|
||||
{error}
|
||||
</Admonition>
|
||||
)}
|
||||
{extraContent}
|
||||
{error && <Admonition type="caution" className="note-detail-error-container">
|
||||
{error}
|
||||
</Admonition>}
|
||||
</div>
|
||||
);
|
||||
|
||||
|
||||
@@ -117,7 +117,6 @@ export default function SvgSplitEditor({ ntxId, note, attachmentName, renderSvg,
|
||||
error={error}
|
||||
onContentChanged={onContentChanged}
|
||||
dataSaved={onSave}
|
||||
placeholder={t("mermaid.placeholder")}
|
||||
previewContent={(
|
||||
<RawHtmlBlock
|
||||
className="render-container"
|
||||
@@ -152,7 +151,6 @@ export default function SvgSplitEditor({ ntxId, note, attachmentName, renderSvg,
|
||||
function useResizer(containerRef: RefObject<HTMLDivElement>, noteId: string, svg: string | undefined) {
|
||||
const lastPanZoom = useRef<{ pan: SvgPanZoom.Point, zoom: number }>();
|
||||
const lastNoteId = useRef<string>();
|
||||
const wasEmpty = useRef<boolean>(false);
|
||||
const zoomRef = useRef<SvgPanZoom.Instance>();
|
||||
const width = useElementSize(containerRef);
|
||||
|
||||
@@ -160,14 +158,9 @@ function useResizer(containerRef: RefObject<HTMLDivElement>, noteId: string, svg
|
||||
useEffect(() => {
|
||||
if (zoomRef.current || width?.width === 0) return;
|
||||
|
||||
const shouldPreservePanZoom = (lastNoteId.current === noteId) && !wasEmpty.current;
|
||||
const shouldPreservePanZoom = (lastNoteId.current === noteId);
|
||||
const svgEl = containerRef.current?.querySelector("svg");
|
||||
if (!svgEl) {
|
||||
if (svg?.trim().length === 0) {
|
||||
wasEmpty.current = true;
|
||||
}
|
||||
return;
|
||||
};
|
||||
if (!svgEl) return;
|
||||
|
||||
const zoomInstance = svgPanZoom(svgEl, {
|
||||
zoomEnabled: true,
|
||||
@@ -193,7 +186,7 @@ function useResizer(containerRef: RefObject<HTMLDivElement>, noteId: string, svg
|
||||
zoomRef.current = undefined;
|
||||
zoomInstance.destroy();
|
||||
};
|
||||
}, [ containerRef, noteId, svg, width ]);
|
||||
}, [ svg, width ]);
|
||||
|
||||
// React to container changes.
|
||||
useEffect(() => {
|
||||
|
||||
@@ -1,512 +0,0 @@
|
||||
import { t } from "../../../services/i18n";
|
||||
import type { NoteContentTemplate } from "../../layout/NoteContentSwitcher";
|
||||
|
||||
const SAMPLE_DIAGRAMS: NoteContentTemplate[] = [
|
||||
{
|
||||
name: t("mermaid.sample_flowchart"),
|
||||
content: `\
|
||||
flowchart TD
|
||||
A[Christmas] -->|Get money| B(Go shopping)
|
||||
B --> C{Let me think}
|
||||
C -->|One| D[Laptop]
|
||||
C -->|Two| E[iPhone]
|
||||
C -->|Three| F[fa:fa-car Car]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_class"),
|
||||
content: `\
|
||||
classDiagram
|
||||
Animal <|-- Duck
|
||||
Animal <|-- Fish
|
||||
Animal <|-- Zebra
|
||||
Animal : +int age
|
||||
Animal : +String gender
|
||||
Animal: +isMammal()
|
||||
Animal: +mate()
|
||||
class Duck{
|
||||
+String beakColor
|
||||
+swim()
|
||||
+quack()
|
||||
}
|
||||
class Fish{
|
||||
-int sizeInFeet
|
||||
-canEat()
|
||||
}
|
||||
class Zebra{
|
||||
+bool is_wild
|
||||
+run()
|
||||
}
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_sequence"),
|
||||
content: `\
|
||||
sequenceDiagram
|
||||
Alice->>+John: Hello John, how are you?
|
||||
Alice->>+John: John, can you hear me?
|
||||
John-->>-Alice: Hi Alice, I can hear you!
|
||||
John-->>-Alice: I feel great!
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_entity_relationship"),
|
||||
content: `\
|
||||
erDiagram
|
||||
CUSTOMER ||--o{ ORDER : places
|
||||
ORDER ||--|{ ORDER_ITEM : contains
|
||||
PRODUCT ||--o{ ORDER_ITEM : includes
|
||||
CUSTOMER {
|
||||
string id
|
||||
string name
|
||||
string email
|
||||
}
|
||||
ORDER {
|
||||
string id
|
||||
date orderDate
|
||||
string status
|
||||
}
|
||||
PRODUCT {
|
||||
string id
|
||||
string name
|
||||
float price
|
||||
}
|
||||
ORDER_ITEM {
|
||||
int quantity
|
||||
float price
|
||||
}
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_state"),
|
||||
content: `\
|
||||
stateDiagram-v2
|
||||
[*] --> Still
|
||||
Still --> [*]
|
||||
Still --> Moving
|
||||
Moving --> Still
|
||||
Moving --> Crash
|
||||
Crash --> [*]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_mindmap"),
|
||||
content: `\
|
||||
mindmap
|
||||
root((mindmap))
|
||||
Origins
|
||||
Long history
|
||||
::icon(fa fa-book)
|
||||
Popularisation
|
||||
British popular psychology author Tony Buzan
|
||||
Research
|
||||
On effectiveness<br/>and features
|
||||
On Automatic creation
|
||||
Uses
|
||||
Creative techniques
|
||||
Strategic planning
|
||||
Argument mapping
|
||||
Tools
|
||||
Pen and paper
|
||||
Mermaid
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_architecture"),
|
||||
content: `\
|
||||
architecture-beta
|
||||
group api(cloud)[API]
|
||||
|
||||
service db(database)[Database] in api
|
||||
service disk1(disk)[Storage] in api
|
||||
service disk2(disk)[Storage] in api
|
||||
service server(server)[Server] in api
|
||||
|
||||
db:L -- R:server
|
||||
disk1:T -- B:server
|
||||
disk2:T -- B:db
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_block"),
|
||||
content: `\
|
||||
block-beta
|
||||
columns 1
|
||||
db(("DB"))
|
||||
blockArrowId6<[" "]>(down)
|
||||
block:ID
|
||||
A
|
||||
B["A wide one in the middle"]
|
||||
C
|
||||
end
|
||||
space
|
||||
D
|
||||
ID --> D
|
||||
C --> D
|
||||
style B fill:#969,stroke:#333,stroke-width:4px
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_c4"),
|
||||
content: `\
|
||||
C4Context
|
||||
title System Context diagram for Internet Banking System
|
||||
Enterprise_Boundary(b0, "BankBoundary0") {
|
||||
Person(customerA, "Banking Customer A", "A customer of the bank, with personal bank accounts.")
|
||||
Person(customerB, "Banking Customer B")
|
||||
Person_Ext(customerC, "Banking Customer C", "desc")
|
||||
|
||||
Person(customerD, "Banking Customer D", "A customer of the bank, <br/> with personal bank accounts.")
|
||||
|
||||
System(SystemAA, "Internet Banking System", "Allows customers to view information about their bank accounts, and make payments.")
|
||||
|
||||
Enterprise_Boundary(b1, "BankBoundary") {
|
||||
SystemDb_Ext(SystemE, "Mainframe Banking System", "Stores all of the core banking information about customers, accounts, transactions, etc.")
|
||||
|
||||
System_Boundary(b2, "BankBoundary2") {
|
||||
System(SystemA, "Banking System A")
|
||||
System(SystemB, "Banking System B", "A system of the bank, with personal bank accounts. next line.")
|
||||
}
|
||||
|
||||
System_Ext(SystemC, "E-mail system", "The internal Microsoft Exchange e-mail system.")
|
||||
SystemDb(SystemD, "Banking System D Database", "A system of the bank, with personal bank accounts.")
|
||||
|
||||
Boundary(b3, "BankBoundary3", "boundary") {
|
||||
SystemQueue(SystemF, "Banking System F Queue", "A system of the bank.")
|
||||
SystemQueue_Ext(SystemG, "Banking System G Queue", "A system of the bank, with personal bank accounts.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BiRel(customerA, SystemAA, "Uses")
|
||||
BiRel(SystemAA, SystemE, "Uses")
|
||||
Rel(SystemAA, SystemC, "Sends e-mails", "SMTP")
|
||||
Rel(SystemC, customerA, "Sends e-mails to")
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_gantt"),
|
||||
content: `\
|
||||
gantt
|
||||
title A Gantt Diagram
|
||||
dateFormat YYYY-MM-DD
|
||||
section Section
|
||||
A task :a1, 2014-01-01, 30d
|
||||
Another task :after a1 , 20d
|
||||
section Another
|
||||
Task in sec :2014-01-12 , 12d
|
||||
another task : 24d
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_git"),
|
||||
content: `\
|
||||
gitGraph
|
||||
commit
|
||||
branch develop
|
||||
checkout develop
|
||||
commit
|
||||
commit
|
||||
checkout main
|
||||
merge develop
|
||||
commit
|
||||
branch feature
|
||||
checkout feature
|
||||
commit
|
||||
commit
|
||||
checkout main
|
||||
merge feature
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_kanban"),
|
||||
content: `\
|
||||
---
|
||||
config:
|
||||
kanban:
|
||||
ticketBaseUrl: 'https://github.com/mermaid-js/mermaid/issues/#TICKET#'
|
||||
---
|
||||
kanban
|
||||
Todo
|
||||
[Create Documentation]
|
||||
docs[Create Blog about the new diagram]
|
||||
[In progress]
|
||||
id6[Create renderer so that it works in all cases. We also add some extra text here for testing purposes. And some more just for the extra flare.]
|
||||
id9[Ready for deploy]
|
||||
id8[Design grammar]@{ assigned: 'knsv' }
|
||||
id10[Ready for test]
|
||||
id4[Create parsing tests]@{ ticket: 2038, assigned: 'K.Sveidqvist', priority: 'High' }
|
||||
id66[last item]@{ priority: 'Very Low', assigned: 'knsv' }
|
||||
id11[Done]
|
||||
id5[define getData]
|
||||
id2[Title of diagram is more than 100 chars when user duplicates diagram with 100 char]@{ ticket: 2036, priority: 'Very High'}
|
||||
id3[Update DB function]@{ ticket: 2037, assigned: knsv, priority: 'High' }
|
||||
|
||||
id12[Can't reproduce]
|
||||
id3[Weird flickering in Firefox]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_packet"),
|
||||
content: `\
|
||||
---
|
||||
title: "TCP Packet"
|
||||
---
|
||||
packet
|
||||
0-15: "Source Port"
|
||||
16-31: "Destination Port"
|
||||
32-63: "Sequence Number"
|
||||
64-95: "Acknowledgment Number"
|
||||
96-99: "Data Offset"
|
||||
100-105: "Reserved"
|
||||
106: "URG"
|
||||
107: "ACK"
|
||||
108: "PSH"
|
||||
109: "RST"
|
||||
110: "SYN"
|
||||
111: "FIN"
|
||||
112-127: "Window"
|
||||
128-143: "Checksum"
|
||||
144-159: "Urgent Pointer"
|
||||
160-191: "(Options and Padding)"
|
||||
192-255: "Data (variable length)"
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_pie"),
|
||||
content: `\
|
||||
pie title Pets adopted by volunteers
|
||||
"Dogs" : 386
|
||||
"Cats" : 85
|
||||
"Rats" : 15
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_quadrant"),
|
||||
content: `\
|
||||
quadrantChart
|
||||
title Reach and engagement of campaigns
|
||||
x-axis Low Reach --> High Reach
|
||||
y-axis Low Engagement --> High Engagement
|
||||
quadrant-1 We should expand
|
||||
quadrant-2 Need to promote
|
||||
quadrant-3 Re-evaluate
|
||||
quadrant-4 May be improved
|
||||
Campaign A: [0.3, 0.6]
|
||||
Campaign B: [0.45, 0.23]
|
||||
Campaign C: [0.57, 0.69]
|
||||
Campaign D: [0.78, 0.34]
|
||||
Campaign E: [0.40, 0.34]
|
||||
Campaign F: [0.35, 0.78]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_radar"),
|
||||
content: `\
|
||||
---
|
||||
title: "Grades"
|
||||
---
|
||||
radar-beta
|
||||
axis m["Math"], s["Science"], e["English"]
|
||||
axis h["History"], g["Geography"], a["Art"]
|
||||
curve a["Alice"]{85, 90, 80, 70, 75, 90}
|
||||
curve b["Bob"]{70, 75, 85, 80, 90, 85}
|
||||
|
||||
max 100
|
||||
min 0
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_requirement"),
|
||||
content: `\
|
||||
requirementDiagram
|
||||
|
||||
requirement test_req {
|
||||
id: 1
|
||||
text: the test text.
|
||||
risk: high
|
||||
verifymethod: test
|
||||
}
|
||||
|
||||
element test_entity {
|
||||
type: simulation
|
||||
}
|
||||
|
||||
test_entity - satisfies -> test_req
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_sankey"),
|
||||
content: `\
|
||||
---
|
||||
config:
|
||||
sankey:
|
||||
showValues: false
|
||||
---
|
||||
sankey-beta
|
||||
|
||||
Agricultural 'waste',Bio-conversion,124.729
|
||||
Bio-conversion,Liquid,0.597
|
||||
Bio-conversion,Losses,26.862
|
||||
Bio-conversion,Solid,280.322
|
||||
Bio-conversion,Gas,81.144
|
||||
Biofuel imports,Liquid,35
|
||||
Biomass imports,Solid,35
|
||||
Coal imports,Coal,11.606
|
||||
Coal reserves,Coal,63.965
|
||||
Coal,Solid,75.571
|
||||
District heating,Industry,10.639
|
||||
District heating,Heating and cooling - commercial,22.505
|
||||
District heating,Heating and cooling - homes,46.184
|
||||
Electricity grid,Over generation / exports,104.453
|
||||
Electricity grid,Heating and cooling - homes,113.726
|
||||
Electricity grid,H2 conversion,27.14
|
||||
Electricity grid,Industry,342.165
|
||||
Electricity grid,Road transport,37.797
|
||||
Electricity grid,Agriculture,4.412
|
||||
Electricity grid,Heating and cooling - commercial,40.858
|
||||
Electricity grid,Losses,56.691
|
||||
Electricity grid,Rail transport,7.863
|
||||
Electricity grid,Lighting & appliances - commercial,90.008
|
||||
Electricity grid,Lighting & appliances - homes,93.494
|
||||
Gas imports,NGas,40.719
|
||||
Gas reserves,NGas,82.233
|
||||
Gas,Heating and cooling - commercial,0.129
|
||||
Gas,Losses,1.401
|
||||
Gas,Thermal generation,151.891
|
||||
Gas,Agriculture,2.096
|
||||
Gas,Industry,48.58
|
||||
Geothermal,Electricity grid,7.013
|
||||
H2 conversion,H2,20.897
|
||||
H2 conversion,Losses,6.242
|
||||
H2,Road transport,20.897
|
||||
Hydro,Electricity grid,6.995
|
||||
Liquid,Industry,121.066
|
||||
Liquid,International shipping,128.69
|
||||
Liquid,Road transport,135.835
|
||||
Liquid,Domestic aviation,14.458
|
||||
Liquid,International aviation,206.267
|
||||
Liquid,Agriculture,3.64
|
||||
Liquid,National navigation,33.218
|
||||
Liquid,Rail transport,4.413
|
||||
Marine algae,Bio-conversion,4.375
|
||||
NGas,Gas,122.952
|
||||
Nuclear,Thermal generation,839.978
|
||||
Oil imports,Oil,504.287
|
||||
Oil reserves,Oil,107.703
|
||||
Oil,Liquid,611.99
|
||||
Other waste,Solid,56.587
|
||||
Other waste,Bio-conversion,77.81
|
||||
Pumped heat,Heating and cooling - homes,193.026
|
||||
Pumped heat,Heating and cooling - commercial,70.672
|
||||
Solar PV,Electricity grid,59.901
|
||||
Solar Thermal,Heating and cooling - homes,19.263
|
||||
Solar,Solar Thermal,19.263
|
||||
Solar,Solar PV,59.901
|
||||
Solid,Agriculture,0.882
|
||||
Solid,Thermal generation,400.12
|
||||
Solid,Industry,46.477
|
||||
Thermal generation,Electricity grid,525.531
|
||||
Thermal generation,Losses,787.129
|
||||
Thermal generation,District heating,79.329
|
||||
Tidal,Electricity grid,9.452
|
||||
UK land based bioenergy,Bio-conversion,182.01
|
||||
Wave,Electricity grid,19.013
|
||||
Wind,Electricity grid,289.366
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_timeline"),
|
||||
content: `\
|
||||
timeline
|
||||
title History of Social Media Platform
|
||||
2002 : LinkedIn
|
||||
2004 : Facebook
|
||||
: Google
|
||||
2005 : YouTube
|
||||
2006 : Twitter
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_treemap"),
|
||||
content: `\
|
||||
treemap-beta
|
||||
"Section 1"
|
||||
"Leaf 1.1": 12
|
||||
"Section 1.2"
|
||||
"Leaf 1.2.1": 12
|
||||
"Section 2"
|
||||
"Leaf 2.1": 20
|
||||
"Leaf 2.2": 25
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_user_journey"),
|
||||
content: `\
|
||||
journey
|
||||
title My working day
|
||||
section Go to work
|
||||
Make tea: 5: Me
|
||||
Go upstairs: 3: Me
|
||||
Do work: 1: Me, Cat
|
||||
section Go home
|
||||
Go downstairs: 5: Me
|
||||
Sit down: 5: Me
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_xy"),
|
||||
content: `\
|
||||
xychart-beta
|
||||
title "Sales Revenue"
|
||||
x-axis [jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec]
|
||||
y-axis "Revenue (in $)" 4000 --> 11000
|
||||
bar [5000, 6000, 7500, 8200, 9500, 10500, 11000, 10200, 9200, 8500, 7000, 6000]
|
||||
line [5000, 6000, 7500, 8200, 9500, 10500, 11000, 10200, 9200, 8500, 7000, 6000]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_venn"),
|
||||
content: `\
|
||||
venn-beta
|
||||
title Web Dev
|
||||
set Frontend
|
||||
text React
|
||||
text shadcn-ui
|
||||
text Firebase
|
||||
set Backend
|
||||
text Hono
|
||||
text PostgreSQL
|
||||
text S3
|
||||
text Lambda
|
||||
union Frontend,Backend["APIs"]
|
||||
`
|
||||
},
|
||||
{
|
||||
name: t("mermaid.sample_ishikawa"),
|
||||
content: `\
|
||||
ishikawa-beta
|
||||
Blurry Photo
|
||||
Process
|
||||
Out of focus
|
||||
Shutter speed too slow
|
||||
Protective film not removed
|
||||
Beautification filter applied
|
||||
User
|
||||
Shaky hands
|
||||
Equipment
|
||||
LENS
|
||||
Inappropriate lens
|
||||
Damaged lens
|
||||
Dirty lens
|
||||
SENSOR
|
||||
Damaged sensor
|
||||
Dirty sensor
|
||||
Environment
|
||||
Subject moved too quickly
|
||||
Too dark
|
||||
`
|
||||
}
|
||||
];
|
||||
|
||||
export default SAMPLE_DIAGRAMS;
|
||||
@@ -21,6 +21,7 @@ import TimeSelector from "./components/TimeSelector";
|
||||
export default function OtherSettings() {
|
||||
return (
|
||||
<>
|
||||
<SearchSettings />
|
||||
{isElectron() && <>
|
||||
<SearchEngineSettings />
|
||||
<TrayOptionsSettings />
|
||||
@@ -36,6 +37,21 @@ export default function OtherSettings() {
|
||||
);
|
||||
}
|
||||
|
||||
function SearchSettings() {
|
||||
const [ fuzzyEnabled, setFuzzyEnabled ] = useTriliumOptionBool("searchEnableFuzzyMatching");
|
||||
|
||||
return (
|
||||
<OptionsSection title={t("search.title")}>
|
||||
<FormCheckbox
|
||||
name="search-fuzzy-matching"
|
||||
label={t("search.enable_fuzzy_matching")}
|
||||
currentValue={fuzzyEnabled}
|
||||
onChange={setFuzzyEnabled}
|
||||
/>
|
||||
</OptionsSection>
|
||||
);
|
||||
}
|
||||
|
||||
function SearchEngineSettings() {
|
||||
const [ customSearchEngineName, setCustomSearchEngineName ] = useTriliumOption("customSearchEngineName");
|
||||
const [ customSearchEngineUrl, setCustomSearchEngineUrl ] = useTriliumOption("customSearchEngineUrl");
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import { Connection } from "jsplumb";
|
||||
import { RefObject } from "preact";
|
||||
|
||||
import appContext from "../../../components/app_context";
|
||||
import FNote from "../../../entities/fnote";
|
||||
import contextMenu from "../../../menus/context_menu";
|
||||
import link_context_menu from "../../../menus/link_context_menu";
|
||||
import dialog from "../../../services/dialog";
|
||||
import { t } from "../../../services/i18n";
|
||||
import server from "../../../services/server";
|
||||
import RelationMapApi from "./api";
|
||||
import { Connection } from "jsplumb";
|
||||
|
||||
export function buildNoteContextMenuHandler(note: FNote | null | undefined, mapApiRef: RefObject<RelationMapApi>) {
|
||||
return (e: MouseEvent) => {
|
||||
@@ -19,8 +17,22 @@ export function buildNoteContextMenuHandler(note: FNote | null | undefined, mapA
|
||||
x: e.pageX,
|
||||
y: e.pageY,
|
||||
items: [
|
||||
...link_context_menu.getItems(e),
|
||||
{ kind: "separator" },
|
||||
{
|
||||
title: t("relation_map.open_in_new_tab"),
|
||||
uiIcon: "bx bx-empty",
|
||||
handler: () => appContext.tabManager.openTabWithNoteWithHoisting(note.noteId)
|
||||
},
|
||||
{
|
||||
title: t("relation_map.remove_note"),
|
||||
uiIcon: "bx bx-trash",
|
||||
handler: async () => {
|
||||
if (!note) return;
|
||||
const result = await dialog.confirmDeleteNoteBoxWithNote(note.title);
|
||||
if (typeof result !== "object" || !result.confirmed) return;
|
||||
|
||||
mapApiRef.current?.removeItem(note.noteId, result.isDeleteNoteChecked);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: t("relation_map.edit_title"),
|
||||
uiIcon: "bx bx-pencil",
|
||||
@@ -37,26 +49,10 @@ export function buildNoteContextMenuHandler(note: FNote | null | undefined, mapA
|
||||
|
||||
await server.put(`notes/${note.noteId}/title`, { title });
|
||||
}
|
||||
},
|
||||
{ kind: "separator" },
|
||||
|
||||
{
|
||||
title: t("relation_map.remove_note"),
|
||||
uiIcon: "bx bx-trash",
|
||||
handler: async () => {
|
||||
if (!note) return;
|
||||
const result = await dialog.confirmDeleteNoteBoxWithNote(note.title);
|
||||
if (typeof result !== "object" || !result.confirmed) return;
|
||||
|
||||
mapApiRef.current?.removeItem(note.noteId, result.isDeleteNoteChecked);
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
selectMenuItemHandler({ command }) {
|
||||
// Pass the events to the link context menu
|
||||
link_context_menu.handleLinkContextMenuItem(command, e, note.noteId);
|
||||
}
|
||||
});
|
||||
selectMenuItemHandler() {}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
"preact"
|
||||
],
|
||||
"rootDir": "src",
|
||||
"jsx": "react-jsx",
|
||||
"jsx": "preserve",
|
||||
"jsxFactory": "h",
|
||||
"jsxImportSource": "preact",
|
||||
"module": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
|
||||
@@ -6,7 +6,8 @@
|
||||
"node",
|
||||
"vitest"
|
||||
],
|
||||
"jsx": "react-jsx",
|
||||
"jsx": "preserve",
|
||||
"jsxFactory": "h",
|
||||
"jsxImportSource": "preact",
|
||||
"module": "esnext",
|
||||
"moduleResolution": "bundler"
|
||||
|
||||
@@ -103,7 +103,10 @@ export default defineConfig(() => ({
|
||||
return "src/[name].js";
|
||||
},
|
||||
chunkFileNames: "src/[name]-[hash].js",
|
||||
assetFileNames: "src/[name]-[hash].[ext]"
|
||||
assetFileNames: "src/[name]-[hash].[ext]",
|
||||
manualChunks: {
|
||||
"ckeditor5": [ "@triliumnext/ckeditor5" ]
|
||||
},
|
||||
},
|
||||
onwarn(warning, rollupWarn) {
|
||||
if (warning.code === "MODULE_LEVEL_DIRECTIVE") {
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"colors": "1.4.0",
|
||||
"diff": "8.0.3",
|
||||
"sqlite": "5.1.1",
|
||||
"sqlite3": "6.0.1"
|
||||
"sqlite3": "5.1.7"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "tsx src/compare.ts",
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@electron/remote": "2.1.3",
|
||||
"better-sqlite3": "12.8.0",
|
||||
"better-sqlite3": "12.6.2",
|
||||
"electron-debug": "4.1.0",
|
||||
"electron-dl": "4.0.0",
|
||||
"electron-squirrel-startup": "1.0.1",
|
||||
@@ -35,7 +35,7 @@
|
||||
"@triliumnext/commons": "workspace:*",
|
||||
"@triliumnext/server": "workspace:*",
|
||||
"copy-webpack-plugin": "14.0.0",
|
||||
"electron": "41.0.3",
|
||||
"electron": "40.8.0",
|
||||
"@electron-forge/cli": "7.11.1",
|
||||
"@electron-forge/maker-deb": "7.11.1",
|
||||
"@electron-forge/maker-dmg": "7.11.1",
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
"description": "Standalone tool to dump contents of Trilium document.db file into a directory tree of notes",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"better-sqlite3": "12.8.0",
|
||||
"better-sqlite3": "12.6.2",
|
||||
"mime-types": "3.0.2",
|
||||
"sanitize-filename": "1.6.4",
|
||||
"sanitize-filename": "1.6.3",
|
||||
"tsx": "4.21.0",
|
||||
"yargs": "18.0.0"
|
||||
},
|
||||
|
||||
@@ -5,14 +5,14 @@
|
||||
"description": "Desktop version of Trilium which imports the demo database (presented to new users at start-up) or the user guide and other documentation and saves the modifications for committing.",
|
||||
"dependencies": {
|
||||
"archiver": "7.0.1",
|
||||
"better-sqlite3": "12.8.0"
|
||||
"better-sqlite3": "12.6.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@triliumnext/client": "workspace:*",
|
||||
"@triliumnext/desktop": "workspace:*",
|
||||
"@types/fs-extra": "11.0.4",
|
||||
"copy-webpack-plugin": "14.0.0",
|
||||
"electron": "41.0.3",
|
||||
"electron": "40.8.0",
|
||||
"fs-extra": "11.3.4"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -75,10 +75,8 @@ test("Tabs are restored in right order", async ({ page, context }) => {
|
||||
await expect(app.getActiveTab()).toContainText("Mermaid");
|
||||
|
||||
// Select the mid one.
|
||||
const recentNotesSaved = page.waitForResponse((resp) => resp.url().includes("/api/recent-notes") && resp.ok());
|
||||
await (await app.getTab(1)).click();
|
||||
await expect(app.noteTreeActiveNote).toContainText("Text notes");
|
||||
await recentNotesSaved;
|
||||
|
||||
// Refresh the page and check the order.
|
||||
await app.goto( { preserveTabs: true });
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"better-sqlite3": "12.8.0"
|
||||
"better-sqlite3": "12.6.2"
|
||||
}
|
||||
}
|
||||
@@ -6,7 +6,6 @@
|
||||
"main": "./src/main.ts",
|
||||
"scripts": {
|
||||
"dev": "cross-env NODE_ENV=development TRILIUM_ENV=dev TRILIUM_DATA_DIR=data TRILIUM_RESOURCE_DIR=src tsx watch --ignore '../client/node_modules/.vite-temp' ./src/main.ts",
|
||||
"dev-alt": "cross-env NODE_ENV=development TRILIUM_ENV=dev TRILIUM_DATA_DIR=data2 TRILIUM_RESOURCE_DIR=src tsx watch --ignore '../client/node_modules/.vite-temp' ./src/main.ts",
|
||||
"start-no-dir": "cross-env NODE_ENV=development TRILIUM_ENV=dev TRILIUM_RESOURCE_DIR=src tsx watch --ignore '../client/node_modules/.vite-temp' ./src/main.ts",
|
||||
"edit-integration-db": "cross-env NODE_ENV=development TRILIUM_PORT=8086 TRILIUM_ENV=dev TRILIUM_DATA_DIR=spec/db TRILIUM_INTEGRATION_TEST=edit TRILIUM_RESOURCE_DIR=src tsx watch --ignore '../client/node_modules/.vite-temp' ./src/main.ts",
|
||||
"build": "tsx scripts/build.ts",
|
||||
@@ -30,7 +29,7 @@
|
||||
"proxy-nginx-subdir": "docker run --name trilium-nginx-subdir --rm --network=host -v ./docker/nginx.conf:/etc/nginx/conf.d/default.conf:ro nginx:latest"
|
||||
},
|
||||
"dependencies": {
|
||||
"better-sqlite3": "12.8.0",
|
||||
"better-sqlite3": "12.6.2",
|
||||
"html-to-text": "9.0.5",
|
||||
"node-html-parser": "7.1.0",
|
||||
"sucrase": "3.35.1"
|
||||
@@ -79,11 +78,11 @@
|
||||
"cls-hooked": "4.2.2",
|
||||
"compression": "1.8.1",
|
||||
"cookie-parser": "1.4.7",
|
||||
"csrf-csrf": "4.0.3",
|
||||
"csrf-csrf": "3.2.2",
|
||||
"debounce": "3.0.0",
|
||||
"debug": "4.4.3",
|
||||
"ejs": "5.0.1",
|
||||
"electron": "41.0.3",
|
||||
"electron": "40.8.0",
|
||||
"electron-debug": "4.1.0",
|
||||
"electron-window-state": "5.0.3",
|
||||
"escape-html": "1.0.3",
|
||||
@@ -97,9 +96,9 @@
|
||||
"helmet": "8.1.0",
|
||||
"html": "1.0.0",
|
||||
"html2plaintext": "2.1.4",
|
||||
"http-proxy-agent": "8.0.0",
|
||||
"https-proxy-agent": "8.0.0",
|
||||
"i18next": "25.10.3",
|
||||
"http-proxy-agent": "7.0.2",
|
||||
"https-proxy-agent": "7.0.6",
|
||||
"i18next": "25.8.17",
|
||||
"i18next-fs-backend": "2.6.1",
|
||||
"image-type": "6.0.0",
|
||||
"ini": "6.0.0",
|
||||
@@ -107,15 +106,15 @@
|
||||
"is-svg": "6.1.0",
|
||||
"jimp": "1.6.0",
|
||||
"lorem-ipsum": "2.0.8",
|
||||
"marked": "17.0.5",
|
||||
"marked": "17.0.4",
|
||||
"mime-types": "3.0.2",
|
||||
"multer": "2.1.1",
|
||||
"normalize-strings": "1.1.1",
|
||||
"rand-token": "1.0.1",
|
||||
"safe-compare": "1.1.4",
|
||||
"sanitize-filename": "1.6.4",
|
||||
"sanitize-html": "2.17.2",
|
||||
"sax": "1.6.0",
|
||||
"sanitize-filename": "1.6.3",
|
||||
"sanitize-html": "2.17.1",
|
||||
"sax": "1.5.0",
|
||||
"serve-favicon": "2.5.1",
|
||||
"stream-throttle": "0.1.3",
|
||||
"strip-bom": "5.0.0",
|
||||
@@ -126,8 +125,8 @@
|
||||
"tmp": "0.2.5",
|
||||
"turnish": "1.8.0",
|
||||
"unescape": "1.0.1",
|
||||
"vite": "8.0.1",
|
||||
"ws": "8.20.0",
|
||||
"vite": "7.3.1",
|
||||
"ws": "8.19.0",
|
||||
"xml2js": "0.6.2",
|
||||
"yauzl": "3.2.1"
|
||||
}
|
||||
|
||||
306
apps/server/spec/search_profiling.spec.ts
Normal file
306
apps/server/spec/search_profiling.spec.ts
Normal file
@@ -0,0 +1,306 @@
|
||||
/**
|
||||
* Integration-level search profiling test.
|
||||
*
|
||||
* Uses the real SQLite database (spec/db/document.db loaded in-memory),
|
||||
* real sql module, real becca cache, and the full app stack.
|
||||
*
|
||||
* Profiles search at large scale (50K+ notes) to match real-world
|
||||
* performance reports from users with 240K+ notes.
|
||||
*/
|
||||
import { Application } from "express";
|
||||
import { beforeAll, describe, expect, it } from "vitest";
|
||||
import config from "../src/services/config.js";
|
||||
|
||||
let app: Application;
|
||||
|
||||
function timed<T>(fn: () => T): [T, number] {
|
||||
const start = performance.now();
|
||||
const result = fn();
|
||||
return [result, performance.now() - start];
|
||||
}
|
||||
|
||||
function randomId(len = 12): string {
|
||||
const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
let id = "";
|
||||
for (let i = 0; i < len; i++) id += chars[Math.floor(Math.random() * chars.length)];
|
||||
return id;
|
||||
}
|
||||
|
||||
function randomWord(len = 8): string {
|
||||
const chars = "abcdefghijklmnopqrstuvwxyz";
|
||||
let w = "";
|
||||
for (let i = 0; i < len; i++) w += chars[Math.floor(Math.random() * chars.length)];
|
||||
return w;
|
||||
}
|
||||
|
||||
function generateContent(wordCount: number, keyword?: string): string {
|
||||
const paragraphs: string[] = [];
|
||||
let remaining = wordCount;
|
||||
let injected = false;
|
||||
while (remaining > 0) {
|
||||
const n = Math.min(remaining, 30 + Math.floor(Math.random() * 30));
|
||||
const words: string[] = [];
|
||||
for (let i = 0; i < n; i++) words.push(randomWord(3 + Math.floor(Math.random() * 10)));
|
||||
if (keyword && !injected && remaining < wordCount / 2) {
|
||||
words[Math.floor(words.length / 2)] = keyword;
|
||||
injected = true;
|
||||
}
|
||||
paragraphs.push(`<p>${words.join(" ")}</p>`);
|
||||
remaining -= n;
|
||||
}
|
||||
return paragraphs.join("\n");
|
||||
}
|
||||
|
||||
describe("Search profiling (integration)", () => {
|
||||
beforeAll(async () => {
|
||||
config.General.noAuthentication = true;
|
||||
const buildApp = (await import("../src/app.js")).default;
|
||||
app = await buildApp();
|
||||
});
|
||||
|
||||
it("large-scale profiling (50K notes)", async () => {
|
||||
const sql = (await import("../src/services/sql.js")).default;
|
||||
const becca = (await import("../src/becca/becca.js")).default;
|
||||
const beccaLoader = (await import("../src/becca/becca_loader.js")).default;
|
||||
const cls = (await import("../src/services/cls.js")).default;
|
||||
const searchService = (await import("../src/services/search/services/search.js")).default;
|
||||
const SearchContext = (await import("../src/services/search/search_context.js")).default;
|
||||
const beccaService = (await import("../src/becca/becca_service.js")).default;
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
cls.init(() => {
|
||||
const initialNoteCount = Object.keys(becca.notes).length;
|
||||
console.log(`\n Initial becca notes: ${initialNoteCount}`);
|
||||
|
||||
// ── Seed 50K notes with hierarchy ──
|
||||
// Some folders (depth), some with common keyword "test" in title
|
||||
const TOTAL_NOTES = 50000;
|
||||
const FOLDER_COUNT = 500; // 500 folders
|
||||
const NOTES_PER_FOLDER = (TOTAL_NOTES - FOLDER_COUNT) / FOLDER_COUNT; // ~99 notes per folder
|
||||
const MATCH_FRACTION = 0.10; // 10% match "test" — ~5000 notes
|
||||
const CONTENT_WORDS = 500;
|
||||
|
||||
const now = new Date().toISOString().replace("T", " ").replace("Z", "+0000");
|
||||
console.log(` Seeding ${TOTAL_NOTES} notes (${FOLDER_COUNT} folders, ~${NOTES_PER_FOLDER.toFixed(0)} per folder)...`);
|
||||
|
||||
const [, seedMs] = timed(() => {
|
||||
sql.transactional(() => {
|
||||
const folderIds: string[] = [];
|
||||
|
||||
// Create folders under root
|
||||
for (let f = 0; f < FOLDER_COUNT; f++) {
|
||||
const noteId = `seed${randomId(8)}`;
|
||||
const branchId = `seed${randomId(8)}`;
|
||||
const blobId = `seed${randomId(16)}`;
|
||||
folderIds.push(noteId);
|
||||
|
||||
sql.execute(
|
||||
`INSERT INTO blobs (blobId, content, dateModified, utcDateModified) VALUES (?, ?, ?, ?)`,
|
||||
[blobId, `<p>Folder ${f}</p>`, now, now]
|
||||
);
|
||||
sql.execute(
|
||||
`INSERT INTO notes (noteId, title, type, mime, blobId, isProtected, isDeleted,
|
||||
dateCreated, dateModified, utcDateCreated, utcDateModified)
|
||||
VALUES (?, ?, 'text', 'text/html', ?, 0, 0, ?, ?, ?, ?)`,
|
||||
[noteId, `Folder ${f} ${randomWord(5)}`, blobId, now, now, now, now]
|
||||
);
|
||||
sql.execute(
|
||||
`INSERT INTO branches (branchId, noteId, parentNoteId, notePosition, isDeleted, isExpanded, utcDateModified)
|
||||
VALUES (?, ?, 'root', ?, 0, 0, ?)`,
|
||||
[branchId, noteId, f * 10, now]
|
||||
);
|
||||
}
|
||||
|
||||
// Create notes under folders
|
||||
let noteIdx = 0;
|
||||
for (let f = 0; f < FOLDER_COUNT; f++) {
|
||||
const parentId = folderIds[f];
|
||||
for (let n = 0; n < NOTES_PER_FOLDER; n++) {
|
||||
const isMatch = noteIdx < TOTAL_NOTES * MATCH_FRACTION;
|
||||
const noteId = `seed${randomId(8)}`;
|
||||
const branchId = `seed${randomId(8)}`;
|
||||
const blobId = `seed${randomId(16)}`;
|
||||
const title = isMatch
|
||||
? `Test Document ${noteIdx} ${randomWord(6)}`
|
||||
: `Note ${noteIdx} ${randomWord(6)} ${randomWord(5)}`;
|
||||
const content = generateContent(CONTENT_WORDS, isMatch ? "test" : undefined);
|
||||
|
||||
sql.execute(
|
||||
`INSERT INTO blobs (blobId, content, dateModified, utcDateModified) VALUES (?, ?, ?, ?)`,
|
||||
[blobId, content, now, now]
|
||||
);
|
||||
sql.execute(
|
||||
`INSERT INTO notes (noteId, title, type, mime, blobId, isProtected, isDeleted,
|
||||
dateCreated, dateModified, utcDateCreated, utcDateModified)
|
||||
VALUES (?, ?, 'text', 'text/html', ?, 0, 0, ?, ?, ?, ?)`,
|
||||
[noteId, title, blobId, now, now, now, now]
|
||||
);
|
||||
sql.execute(
|
||||
`INSERT INTO branches (branchId, noteId, parentNoteId, notePosition, isDeleted, isExpanded, utcDateModified)
|
||||
VALUES (?, ?, ?, ?, 0, 0, ?)`,
|
||||
[branchId, noteId, parentId, n * 10, now]
|
||||
);
|
||||
noteIdx++;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
console.log(` SQL seeding: ${seedMs.toFixed(0)}ms`);
|
||||
|
||||
const [, reloadMs] = timed(() => beccaLoader.load());
|
||||
const totalNotes = Object.keys(becca.notes).length;
|
||||
console.log(` Becca reload: ${reloadMs.toFixed(0)}ms Total notes: ${totalNotes}`);
|
||||
|
||||
// ── Warm caches ──
|
||||
searchService.searchNotesForAutocomplete("test", true);
|
||||
|
||||
// ════════════════════════════════════════════
|
||||
// PROFILING AT SCALE
|
||||
// ════════════════════════════════════════════
|
||||
|
||||
console.log(`\n ════ PROFILING (${totalNotes} notes) ════\n`);
|
||||
|
||||
// 1. getCandidateNotes cost (the full-scan bottleneck)
|
||||
const allNotes = Object.values(becca.notes);
|
||||
const [, flatScanMs] = timed(() => {
|
||||
let count = 0;
|
||||
for (const note of allNotes) {
|
||||
const ft = note.getFlatText();
|
||||
if (ft.includes("test")) count++;
|
||||
}
|
||||
return count;
|
||||
});
|
||||
console.log(` getFlatText + includes scan (${allNotes.length} notes): ${flatScanMs.toFixed(1)}ms`);
|
||||
|
||||
// 2. Full findResultsWithQuery (includes candidate scan + parent walk + scoring)
|
||||
const findTimes: number[] = [];
|
||||
let findResultCount = 0;
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [r, ms] = timed(() =>
|
||||
searchService.findResultsWithQuery("test", new SearchContext({ fastSearch: true }))
|
||||
);
|
||||
findTimes.push(ms);
|
||||
findResultCount = r.length;
|
||||
}
|
||||
const findAvg = findTimes.reduce((a, b) => a + b, 0) / findTimes.length;
|
||||
console.log(` findResultsWithQuery (fast): avg ${findAvg.toFixed(1)}ms (${findResultCount} results)`);
|
||||
|
||||
// 3. Exact-only (no fuzzy)
|
||||
const exactTimes: number[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [, ms] = timed(() =>
|
||||
searchService.findResultsWithQuery("test", new SearchContext({ fastSearch: true, enableFuzzyMatching: false }))
|
||||
);
|
||||
exactTimes.push(ms);
|
||||
}
|
||||
const exactAvg = exactTimes.reduce((a, b) => a + b, 0) / exactTimes.length;
|
||||
console.log(` findResultsWithQuery (exact): avg ${exactAvg.toFixed(1)}ms`);
|
||||
console.log(` Fuzzy overhead: ${(findAvg - exactAvg).toFixed(1)}ms`);
|
||||
|
||||
// 4. SearchResult construction + computeScore cost (isolated)
|
||||
const results = searchService.findResultsWithQuery("test", new SearchContext({ fastSearch: true }));
|
||||
console.log(` Total results before trim: ${results.length}`);
|
||||
|
||||
const [, scoreAllMs] = timed(() => {
|
||||
for (const r of results) r.computeScore("test", ["test"], true);
|
||||
});
|
||||
console.log(` computeScore × ${results.length}: ${scoreAllMs.toFixed(1)}ms (${(scoreAllMs / results.length).toFixed(3)}ms/result)`);
|
||||
|
||||
// 5. getNoteTitleForPath for all results
|
||||
const [, pathTitleMs] = timed(() => {
|
||||
for (const r of results) beccaService.getNoteTitleForPath(r.notePathArray);
|
||||
});
|
||||
console.log(` getNoteTitleForPath × ${results.length}: ${pathTitleMs.toFixed(1)}ms`);
|
||||
|
||||
// 6. Content snippet extraction (only 200)
|
||||
const trimmed = results.slice(0, 200);
|
||||
const [, snippetMs] = timed(() => {
|
||||
for (const r of trimmed) {
|
||||
r.contentSnippet = searchService.extractContentSnippet(r.noteId, ["test"]);
|
||||
}
|
||||
});
|
||||
console.log(` extractContentSnippet × 200: ${snippetMs.toFixed(1)}ms`);
|
||||
|
||||
// 7. Highlighting (only 200)
|
||||
const [, hlMs] = timed(() => {
|
||||
searchService.highlightSearchResults(trimmed, ["test"]);
|
||||
});
|
||||
console.log(` highlightSearchResults × 200: ${hlMs.toFixed(1)}ms`);
|
||||
|
||||
// 7b. getBestNotePath cost (used by fast path)
|
||||
const sampleNotes = Object.values(becca.notes).filter(n => n.title.startsWith("Test Document")).slice(0, 1000);
|
||||
const [, bestPathMs] = timed(() => {
|
||||
for (const n of sampleNotes) n.getBestNotePath();
|
||||
});
|
||||
console.log(` getBestNotePath × ${sampleNotes.length}: ${bestPathMs.toFixed(1)}ms (${(bestPathMs/sampleNotes.length).toFixed(3)}ms/note)`);
|
||||
|
||||
// 8. Full autocomplete end-to-end
|
||||
const autoTimes: number[] = [];
|
||||
let autoCount = 0;
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [r, ms] = timed(() =>
|
||||
searchService.searchNotesForAutocomplete("test", true)
|
||||
);
|
||||
autoTimes.push(ms);
|
||||
autoCount = r.length;
|
||||
}
|
||||
const autoAvg = autoTimes.reduce((a, b) => a + b, 0) / autoTimes.length;
|
||||
const autoMin = Math.min(...autoTimes);
|
||||
console.log(`\n ★ FULL AUTOCOMPLETE: avg ${autoAvg.toFixed(1)}ms min ${autoMin.toFixed(1)}ms (${autoCount} results)`);
|
||||
|
||||
// 9. With a less common search term (fewer matches)
|
||||
const rareTimes: number[] = [];
|
||||
let rareCount = 0;
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [r, ms] = timed(() =>
|
||||
searchService.searchNotesForAutocomplete("leitfaden", true)
|
||||
);
|
||||
rareTimes.push(ms);
|
||||
rareCount = r.length;
|
||||
}
|
||||
const rareAvg = rareTimes.reduce((a, b) => a + b, 0) / rareTimes.length;
|
||||
console.log(` Autocomplete "leitfaden": avg ${rareAvg.toFixed(1)}ms (${rareCount} results)`);
|
||||
|
||||
// 10. Full search (fastSearch=false) — the 2.7s bottleneck
|
||||
console.log(`\n ── Full search (fastSearch=false) ──`);
|
||||
const fullTimes: number[] = [];
|
||||
let fullCount = 0;
|
||||
for (let i = 0; i < 2; i++) {
|
||||
const [r, ms] = timed(() =>
|
||||
searchService.findResultsWithQuery("test", new SearchContext({ fastSearch: false }))
|
||||
);
|
||||
fullTimes.push(ms);
|
||||
fullCount = r.length;
|
||||
}
|
||||
const fullAvg = fullTimes.reduce((a, b) => a + b, 0) / fullTimes.length;
|
||||
console.log(` Full search (flat + SQL): avg ${fullAvg.toFixed(1)}ms (${fullCount} results)`);
|
||||
|
||||
// 11. SQL content scan alone
|
||||
const [scanCount, scanMs] = timed(() => {
|
||||
let count = 0;
|
||||
for (const row of sql.iterateRows<{ content: Buffer | string }>(`
|
||||
SELECT noteId, type, mime, content, isProtected
|
||||
FROM notes JOIN blobs USING (blobId)
|
||||
WHERE type IN ('text', 'code', 'mermaid', 'canvas', 'mindMap')
|
||||
AND isDeleted = 0
|
||||
AND LENGTH(content) < 2097152`)) {
|
||||
count++;
|
||||
}
|
||||
return count;
|
||||
});
|
||||
console.log(` Raw SQL scan (${scanCount} rows): ${scanMs.toFixed(1)}ms`);
|
||||
|
||||
// ── Summary ──
|
||||
console.log(`\n ════ SUMMARY ════`);
|
||||
console.log(` Notes: ${totalNotes} | Matches: ${findResultCount} | Hierarchy depth: 3 (root → folder → note)`);
|
||||
console.log(` ──────────────────────────────────`);
|
||||
console.log(` Autocomplete (fast): ${autoAvg.toFixed(1)}ms`);
|
||||
console.log(` findResults: ${findAvg.toFixed(1)}ms (${((findAvg/autoAvg)*100).toFixed(0)}%)`);
|
||||
console.log(` snippets+highlight: ${(snippetMs + hlMs).toFixed(1)}ms (${(((snippetMs+hlMs)/autoAvg)*100).toFixed(0)}%)`);
|
||||
console.log(` Full search: ${fullAvg.toFixed(1)}ms`);
|
||||
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}, 600_000);
|
||||
});
|
||||
@@ -79,7 +79,7 @@ CREATE UNIQUE INDEX `IDX_entityChanges_entityName_entityId` ON "entity_changes"
|
||||
`entityId`
|
||||
);
|
||||
CREATE INDEX `IDX_branches_noteId_parentNoteId` ON `branches` (`noteId`,`parentNoteId`);
|
||||
CREATE INDEX IDX_branches_parentNoteId_isDeleted_notePosition ON branches (parentNoteId, isDeleted, notePosition);
|
||||
CREATE INDEX IDX_branches_parentNoteId ON branches (parentNoteId);
|
||||
CREATE INDEX `IDX_notes_title` ON `notes` (`title`);
|
||||
CREATE INDEX `IDX_notes_type` ON `notes` (`type`);
|
||||
CREATE INDEX `IDX_notes_dateCreated` ON `notes` (`dateCreated`);
|
||||
@@ -146,13 +146,6 @@ CREATE INDEX IDX_notes_blobId on notes (blobId);
|
||||
CREATE INDEX IDX_revisions_blobId on revisions (blobId);
|
||||
CREATE INDEX IDX_attachments_blobId on attachments (blobId);
|
||||
|
||||
CREATE INDEX IDX_entity_changes_isSynced_id ON entity_changes (isSynced, id);
|
||||
CREATE INDEX IDX_entity_changes_isErased_entityName ON entity_changes (isErased, entityName);
|
||||
CREATE INDEX IDX_notes_isDeleted_utcDateModified ON notes (isDeleted, utcDateModified);
|
||||
CREATE INDEX IDX_branches_isDeleted_utcDateModified ON branches (isDeleted, utcDateModified);
|
||||
CREATE INDEX IDX_attributes_isDeleted_utcDateModified ON attributes (isDeleted, utcDateModified);
|
||||
CREATE INDEX IDX_attachments_isDeleted_utcDateModified ON attachments (isDeleted, utcDateModified);
|
||||
CREATE INDEX IDX_attachments_utcDateScheduledForErasureSince ON attachments (utcDateScheduledForErasureSince);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
|
||||
190
apps/server/src/assets/doc_notes/en/User Guide/User Guide/Note Types.html
generated
vendored
190
apps/server/src/assets/doc_notes/en/User Guide/User Guide/Note Types.html
generated
vendored
@@ -9,7 +9,8 @@
|
||||
note where to place the new one and select:</p>
|
||||
<ul>
|
||||
<li><em>Insert note after</em>, to put the new note underneath the one selected.</li>
|
||||
<li><em>Insert child note</em>, to insert the note as a child of the selected
|
||||
<li
|
||||
><em>Insert child note</em>, to insert the note as a child of the selected
|
||||
note.</li>
|
||||
</ul>
|
||||
<p>
|
||||
@@ -20,7 +21,8 @@
|
||||
<li>When adding a <a href="#root/_help_QEAPj01N5f7w">link</a> in a <a class="reference-link"
|
||||
href="#root/_help_iPIMuisry3hd">Text</a> note, type the desired title of
|
||||
the new note and press Enter. Afterwards the type of the note will be asked.</li>
|
||||
<li>Similarly, when creating a new tab, type the desired title and press Enter.</li>
|
||||
<li
|
||||
>Similarly, when creating a new tab, type the desired title and press Enter.</li>
|
||||
</ul>
|
||||
<h2>Changing the type of a note</h2>
|
||||
<p>It is possible to change the type of a note after it has been created
|
||||
@@ -30,94 +32,96 @@
|
||||
edit the <a href="#root/_help_4FahAwuGTAwC">source of a note</a>.</p>
|
||||
<h2>Supported note types</h2>
|
||||
<p>The following note types are supported by Trilium:</p>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Note Type</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_iPIMuisry3hd">Text</a>
|
||||
</td>
|
||||
<td>The default note type, which allows for rich text formatting, images,
|
||||
admonitions and right-to-left support.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_6f9hih2hXXZk">Code</a>
|
||||
</td>
|
||||
<td>Uses a mono-space font and can be used to store larger chunks of code
|
||||
or plain text than a text note, and has better syntax highlighting.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_m523cpzocqaD">Saved Search</a>
|
||||
</td>
|
||||
<td>Stores the information about a search (the search text, criteria, etc.)
|
||||
for later use. Can be used for quick filtering of a large amount of notes,
|
||||
for example. The search can easily be triggered.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_iRwzGnHPzonm">Relation Map</a>
|
||||
</td>
|
||||
<td>Allows easy creation of notes and relations between them. Can be used
|
||||
for mainly relational data such as a family tree.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_bdUJEHsAPYQR">Note Map</a>
|
||||
</td>
|
||||
<td>Displays the relationships between the notes, whether via relations or
|
||||
their hierarchical structure.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_HcABDtFCkbFN">Render Note</a>
|
||||
</td>
|
||||
<td>Used in <a class="reference-link" href="#root/_help_CdNpE2pqjmI6">Scripting</a>,
|
||||
it displays the HTML content of another note. This allows displaying any
|
||||
kind of content, provided there is a script behind it to generate it.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_GTwFsgaA0lCt">Collections</a>
|
||||
</td>
|
||||
<td>Displays the children of the note either as a grid, a list, or for a more
|
||||
specialized case: a calendar.
|
||||
<br>
|
||||
<br>Generally useful for easy reading of short notes.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_s1aBHPd79XYj">Mermaid Diagrams</a>
|
||||
</td>
|
||||
<td>Displays diagrams such as bar charts, flow charts, state diagrams, etc.
|
||||
Requires a bit of technical knowledge since the diagrams are written in
|
||||
a specialized format.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_grjYqerjn243">Canvas</a>
|
||||
</td>
|
||||
<td>Allows easy drawing of sketches, diagrams, handwritten content. Uses the
|
||||
same technology behind <a href="https://excalidraw.com">excalidraw.com</a>.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_1vHRoWCEjj0L">Web View</a>
|
||||
</td>
|
||||
<td>Displays the content of an external web page, similar to a browser.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_gBbsAeiuUxI5">Mind Map</a>
|
||||
</td>
|
||||
<td>Easy for brainstorming ideas, by placing them in a hierarchical layout.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_81SGnPGMk7Xc">Geo Map</a>
|
||||
</td>
|
||||
<td>Displays the children of the note as a geographical map, one use-case
|
||||
would be to plan vacations. It even has basic support for tracks. Notes
|
||||
can also be created from it.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_W8vYD3Q1zjCR">File</a>
|
||||
</td>
|
||||
<td>Represents an uploaded file such as PDFs, images, video or audio files.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<figure class="table">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Note Type</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_iPIMuisry3hd">Text</a>
|
||||
</td>
|
||||
<td>The default note type, which allows for rich text formatting, images,
|
||||
admonitions and right-to-left support.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_6f9hih2hXXZk">Code</a>
|
||||
</td>
|
||||
<td>Uses a mono-space font and can be used to store larger chunks of code
|
||||
or plain text than a text note, and has better syntax highlighting.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_m523cpzocqaD">Saved Search</a>
|
||||
</td>
|
||||
<td>Stores the information about a search (the search text, criteria, etc.)
|
||||
for later use. Can be used for quick filtering of a large amount of notes,
|
||||
for example. The search can easily be triggered.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_iRwzGnHPzonm">Relation Map</a>
|
||||
</td>
|
||||
<td>Allows easy creation of notes and relations between them. Can be used
|
||||
for mainly relational data such as a family tree.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_bdUJEHsAPYQR">Note Map</a>
|
||||
</td>
|
||||
<td>Displays the relationships between the notes, whether via relations or
|
||||
their hierarchical structure.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_HcABDtFCkbFN">Render Note</a>
|
||||
</td>
|
||||
<td>Used in <a class="reference-link" href="#root/_help_CdNpE2pqjmI6">Scripting</a>,
|
||||
it displays the HTML content of another note. This allows displaying any
|
||||
kind of content, provided there is a script behind it to generate it.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_GTwFsgaA0lCt">Collections</a>
|
||||
</td>
|
||||
<td>Displays the children of the note either as a grid, a list, or for a more
|
||||
specialized case: a calendar.
|
||||
<br>
|
||||
<br>Generally useful for easy reading of short notes.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_s1aBHPd79XYj">Mermaid Diagrams</a>
|
||||
</td>
|
||||
<td>Displays diagrams such as bar charts, flow charts, state diagrams, etc.
|
||||
Requires a bit of technical knowledge since the diagrams are written in
|
||||
a specialized format.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_grjYqerjn243">Canvas</a>
|
||||
</td>
|
||||
<td>Allows easy drawing of sketches, diagrams, handwritten content. Uses the
|
||||
same technology behind <a href="https://excalidraw.com">excalidraw.com</a>.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_1vHRoWCEjj0L">Web View</a>
|
||||
</td>
|
||||
<td>Displays the content of an external web page, similar to a browser.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_gBbsAeiuUxI5">Mind Map</a>
|
||||
</td>
|
||||
<td>Easy for brainstorming ideas, by placing them in a hierarchical layout.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_81SGnPGMk7Xc">Geo Map</a>
|
||||
</td>
|
||||
<td>Displays the children of the note as a geographical map, one use-case
|
||||
would be to plan vacations. It even has basic support for tracks. Notes
|
||||
can also be created from it.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a class="reference-link" href="#root/_help_W8vYD3Q1zjCR">File</a>
|
||||
</td>
|
||||
<td>Represents an uploaded file such as PDFs, images, video or audio files.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</figure>
|
||||
@@ -6,20 +6,10 @@
|
||||
<img style="aspect-ratio:886/663;" src="2_Mermaid Diagrams_image.png"
|
||||
width="886" height="663">
|
||||
</figure>
|
||||
<h2>Types of diagrams</h2>
|
||||
<p>Trilium supports Mermaid, which adds support for various diagrams such
|
||||
as flowchart, sequence diagram, class diagram, state diagram, pie charts,
|
||||
etc., all using a text description of the chart instead of manually drawing
|
||||
the diagram.</p>
|
||||
<p>Starting with v0.103.0, Mermaid diagrams no longer start with a sample
|
||||
flowchart, but instead a pane at the bottom will show all the supported
|
||||
diagrams with sample code for each:</p>
|
||||
<ul>
|
||||
<li>Simply click on any of the samples to apply it.</li>
|
||||
<li>The pane will disappear as soon as something is typed in the code editor
|
||||
or a sample is selected. To make it appear again, simply remove the content
|
||||
of the note.</li>
|
||||
</ul>
|
||||
<h2>Layouts</h2>
|
||||
<p>Depending on the chart being edited and user preference, there are two
|
||||
layouts supported by the Mermaid note type:</p>
|
||||
|
||||
58
apps/server/src/assets/doc_notes/en/User Guide/User Guide/Note Types/Spreadsheets.html
generated
vendored
58
apps/server/src/assets/doc_notes/en/User Guide/User Guide/Note Types/Spreadsheets.html
generated
vendored
@@ -11,17 +11,17 @@
|
||||
Calc, with support for formulas, data validation and text formatting.</p>
|
||||
<h2>Spreadsheets vs. collections</h2>
|
||||
<p>There is a slight overlap between spreadsheets and the <a class="reference-link"
|
||||
href="#root/_help_2FvYrpmOXm29">Table</a> collection. In general the table
|
||||
collection is useful to track meta-information about notes (for example
|
||||
a collection of people and their birthdays), whereas spreadsheets are quite
|
||||
useful for calculations since they support formulas.</p>
|
||||
href="#root/pOsGYCXsbNQG/GTwFsgaA0lCt/_help_2FvYrpmOXm29">Table</a> collection.
|
||||
In general the table collection is useful to track meta-information about
|
||||
notes (for example a collection of people and their birthdays), whereas
|
||||
spreadsheets are quite useful for calculations since they support formulas.</p>
|
||||
<p>Spreadsheets also benefit from a wider range of features such as data
|
||||
validation, formatting and can work on a relatively large dataset.</p>
|
||||
<h2>Important statement regarding data format</h2>
|
||||
<p>For Trilium as a knowledge database, it is important that data is stored
|
||||
in a format that is easy to convert to something else. For example,
|
||||
<a
|
||||
class="reference-link" href="#root/_help_iPIMuisry3hd">Text</a> notes can be exported to either HTML or Markdown, making
|
||||
class="reference-link" href="#root/pOsGYCXsbNQG/KSZ04uQ2D1St/_help_iPIMuisry3hd">Text</a> notes can be exported to either HTML or Markdown, making
|
||||
it relatively easy to migrate to another software or simply to stand the
|
||||
test of time.</p>
|
||||
<p>For spreadsheets, Trilium uses a technology called <a href="https://docs.univer.ai/">Univer Sheets</a>,
|
||||
@@ -41,16 +41,28 @@
|
||||
<h2>Supported features</h2>
|
||||
<p>The spreadsheet has support for the following features:</p>
|
||||
<ul>
|
||||
<li>Filtering</li>
|
||||
<li>Sorting</li>
|
||||
<li>Data validation</li>
|
||||
<li>Conditional formatting</li>
|
||||
<li>Notes / annotations</li>
|
||||
<li>Find / replace</li>
|
||||
<li>
|
||||
<p>Filtering</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Sorting</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Data validation</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Conditional formatting</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Notes / annotations</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Find / replace</p>
|
||||
</li>
|
||||
</ul>
|
||||
<p>We might consider adding <a href="https://docs.univer.ai/guides/sheets/features/filter">other features</a> from
|
||||
Univer at some point. If there is a particular feature that can be added
|
||||
easily, it can be discussed over <a href="#root/_help_wy8So3yZZlH9">GitHub Issues</a>.</p>
|
||||
easily, it can be discussed over <a href="#root/pOsGYCXsbNQG/BgmBlOIl72jZ/_help_wy8So3yZZlH9">GitHub Issues</a>.</p>
|
||||
<h2>Features not supported yet</h2>
|
||||
<h3>Regarding Pro features</h3>
|
||||
<p>Univer spreadsheets also feature a <a href="https://univer.ai/pro">Pro plan</a> which
|
||||
@@ -63,9 +75,15 @@
|
||||
<p>There are a few features that are already planned but are not supported
|
||||
yet:</p>
|
||||
<ul>
|
||||
<li>Trilium-specific formulas (e.g. to obtain the title of a note).</li>
|
||||
<li>User-defined formulas</li>
|
||||
<li>Cross-workbook calculation</li>
|
||||
<li>
|
||||
<p>Trilium-specific formulas (e.g. to obtain the title of a note).</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>User-defined formulas</p>
|
||||
</li>
|
||||
<li>
|
||||
<p>Cross-workbook calculation</p>
|
||||
</li>
|
||||
</ul>
|
||||
<p>If you would like us to work on these features, consider <a href="https://triliumnotes.org/en/support-us">supporting us</a>.</p>
|
||||
<h2>Known limitations</h2>
|
||||
@@ -74,10 +92,12 @@
|
||||
<p>It is possible to share a spreadsheet, case in which a best-effort HTML
|
||||
rendering of the spreadsheet is done.</p>
|
||||
<ul>
|
||||
<li>For more advanced use cases, this will most likely not work as intended.
|
||||
Feel free to <a href="#root/_help_wy8So3yZZlH9">report issues</a>, but keep in
|
||||
mind that we might not be able to have a complete feature parity with all
|
||||
the features of Univer.</li>
|
||||
<li>
|
||||
<p>For more advanced use cases, this will most likely not work as intended.
|
||||
Feel free to <a href="#root/pOsGYCXsbNQG/BgmBlOIl72jZ/_help_wy8So3yZZlH9">report issues</a>,
|
||||
but keep in mind that we might not be able to have a complete feature parity
|
||||
with all the features of Univer.</p>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
|
||||
@@ -101,9 +101,7 @@
|
||||
"copy-without-formatting": "선택한 텍스트를 서식 없이 복사",
|
||||
"force-save-revision": "활성 노트의 새 버전을 강제로 생성/저장",
|
||||
"toggle-book-properties": "컬렉션 속성 토글",
|
||||
"toggle-classic-editor-toolbar": "고정 도구 모음 에디터의 서식 탭을 전환",
|
||||
"export-as-pdf": "현재 노트를 PDF로 내보내기",
|
||||
"toggle-zen-mode": "젠 모드 활성화/비활성화(편집에 집중하기 위한 최소한의 UI)"
|
||||
"toggle-classic-editor-toolbar": "고정 도구 모음 에디터의 서식 탭을 전환"
|
||||
},
|
||||
"hidden-subtree": {
|
||||
"zen-mode": "젠 모드",
|
||||
@@ -126,90 +124,5 @@
|
||||
"sync-title": "동기화",
|
||||
"other": "기타",
|
||||
"advanced-title": "고급"
|
||||
},
|
||||
"keyboard_action_names": {
|
||||
"back-in-note-history": "노트 기록으로 돌아가기",
|
||||
"forward-in-note-history": "노트 기록 앞으로 이동",
|
||||
"command-palette": "명령 팔레트",
|
||||
"scroll-to-active-note": "활성 노트로 스크롤",
|
||||
"quick-search": "빠른 검색",
|
||||
"search-in-subtree": "하위트리에서 검색",
|
||||
"expand-subtree": "하위트리 펼치기",
|
||||
"collapse-tree": "트리 접기",
|
||||
"collapse-subtree": "하위트리 접기",
|
||||
"sort-child-notes": "자식 노트 정렬",
|
||||
"create-note-into-inbox": "인박스에 노트 만들기",
|
||||
"delete-notes": "노트 삭제",
|
||||
"edit-note-title": "노트 제목 편집",
|
||||
"clone-notes-to": "다음으로 복사",
|
||||
"move-notes-to": "다음으로 노트 이동",
|
||||
"copy-notes-to-clipboard": "노트를 클립보드로 복사",
|
||||
"paste-notes-from-clipboard": "클립보드에서 노트 붙이기",
|
||||
"cut-notes-to-clipboard": "클립보드로 노트 잘라내기",
|
||||
"add-note-above-to-selection": "선택 위에 새로운 노트 추가",
|
||||
"add-note-below-to-selection": "선택 아래에 새로운 노트 추가",
|
||||
"duplicate-subtree": "하위트리 복제",
|
||||
"open-new-tab": "새로운탭 열기",
|
||||
"jump-to-note": "이동하기...",
|
||||
"move-note-down": "노트 아래로 이동",
|
||||
"move-note-up": "노트 위로 이동",
|
||||
"close-active-tab": "활성탭 닫기",
|
||||
"reopen-last-tab": "마지막 탭 다시 열기",
|
||||
"activate-next-tab": "다음 탭 활성화",
|
||||
"activate-previous-tab": "이전 탭 활성화",
|
||||
"open-new-window": "새창 열기",
|
||||
"toggle-system-tray-icon": "시스템 트레이 아이콘 토글",
|
||||
"toggle-zen-mode": "젠 모드 토글",
|
||||
"show-note-source": "노트 소스 보기",
|
||||
"show-options": "옵션 보기",
|
||||
"show-revisions": "리비전 보기",
|
||||
"show-recent-changes": "최근 변경사항 보기",
|
||||
"show-sql-console": "SQL 콘솔 보기",
|
||||
"show-backend-log": "백엔드 로그 보기",
|
||||
"show-help": "도움말 보기",
|
||||
"follow-link-under-cursor": "커서 아래 링크 따라 가기",
|
||||
"insert-date-and-time-to-text": "날짜와 시간 텍스트로 추가",
|
||||
"paste-markdown-into-text": "마크다운을 텍스트로 붙여넣기",
|
||||
"edit-read-only-note": "읽기 전용 노트 편집",
|
||||
"add-new-label": "새로운 라벨 추가",
|
||||
"add-new-relation": "새로운 관계 추가",
|
||||
"toggle-ribbon-tab-classic-editor": "클래식 에디터 리본 탭 토글",
|
||||
"toggle-ribbon-tab-basic-properties": "기본 설정 리본탭 토글",
|
||||
"print-active-note": "활성 노트 프린트",
|
||||
"export-active-note-as-pdf": "활성 노트를 PDF로 내보내기",
|
||||
"reload-frontend-app": "프론트엔드 앱 다시 로드",
|
||||
"open-developer-tools": "개발자 툴 열기",
|
||||
"find-in-text": "텍스트에서 찾기",
|
||||
"toggle-full-screen": "전체화면 토글",
|
||||
"zoom-out": "축소",
|
||||
"zoom-in": "확대",
|
||||
"reset-zoom-level": "확대/축소 다시 설정",
|
||||
"copy-without-formatting": "일반 텍스트로 복사",
|
||||
"force-save-revision": "리비전 강제 저장"
|
||||
},
|
||||
"login": {
|
||||
"title": "로그인",
|
||||
"heading": "Trilium 로그인",
|
||||
"incorrect-password": "암호가 맞지 않습니다. 다시 입력해 주세요.",
|
||||
"password": "암호",
|
||||
"button": "로그인",
|
||||
"sign_in_with_sso": "{{ ssoIssuerName }}로 로그인"
|
||||
},
|
||||
"set_password": {
|
||||
"title": "암호 설정",
|
||||
"heading": "암호 설정",
|
||||
"description": "Trilium을 웹에서 사용하기 전에 암호를 먼저 설정해야 합니다. 이 암호로 로그인하세요.",
|
||||
"password": "암호",
|
||||
"password-confirmation": "암호 확인",
|
||||
"button": "암호 설정"
|
||||
},
|
||||
"setup": {
|
||||
"heading": "Trilium 노트 셋업",
|
||||
"next": "다음",
|
||||
"init-in-progress": "문서 초기화 진행 중",
|
||||
"title": "셋업"
|
||||
},
|
||||
"setup_sync-from-desktop": {
|
||||
"step5": "연결 설정이 성공적인지 확인을 위해 \"Test sync\" 버튼을 클릭하세요."
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,35 +58,35 @@
|
||||
<div class="alert alert-warning" id="alert" style="display: none;">
|
||||
</div>
|
||||
|
||||
<div id="setup-type-section" style="margin-top: 20px;">
|
||||
<form id="setup-type-form">
|
||||
<div id="setup-type" data-bind="visible: step() == 'setup-type'" style="margin-top: 20px;">
|
||||
<form data-bind="submit: selectSetupType">
|
||||
|
||||
<div class="radio" style="margin-bottom: 15px;">
|
||||
<label class="tn-radio">
|
||||
<input type="radio" name="setup-type" value="new-document">
|
||||
<input type="radio" name="setup-type" value="new-document" data-bind="checked: setupType">
|
||||
<%= t("setup.new-document") %>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<div class="radio" style="margin-bottom: 15px;">
|
||||
<label class="tn-radio">
|
||||
<input type="radio" name="setup-type" value="sync-from-desktop">
|
||||
<input type="radio" name="setup-type" value="sync-from-desktop" data-bind="checked: setupType">
|
||||
<%= t("setup.sync-from-desktop") %>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<div class="radio" style="margin-bottom: 15px;">
|
||||
<label class="tn-radio">
|
||||
<input type="radio" name="setup-type" value="sync-from-server">
|
||||
<input type="radio" name="setup-type" value="sync-from-server" data-bind="checked: setupType">
|
||||
<%= t("setup.sync-from-server") %>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<button type="submit" id="setup-type-next" class="btn btn-primary" disabled><%= t("setup.next") %></button>
|
||||
<button type="submit" data-bind="disable: !setupTypeSelected()" class="btn btn-primary"><%= t("setup.next") %></button>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="new-document-in-progress-section">
|
||||
<div data-bind="visible: step() == 'new-document-in-progress'">
|
||||
<h2><%= t("setup.init-in-progress") %></h2>
|
||||
|
||||
<div style="display: flex; justify-content: flex-start; margin-top: 20px;">
|
||||
@@ -103,7 +103,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="sync-from-desktop-section">
|
||||
<div data-bind="visible: step() == 'sync-from-desktop'">
|
||||
<h2><%= t("setup_sync-from-desktop.heading") %></h2>
|
||||
|
||||
<p><%= t("setup_sync-from-desktop.description") %></p>
|
||||
@@ -117,11 +117,11 @@
|
||||
<li><%- t("setup_sync-from-desktop.step6", { link: `<a href="/">${t("setup_sync-from-desktop.step6-here")}</a>` }) %></li>
|
||||
</ol>
|
||||
|
||||
<button type="button" data-action="back" class="btn btn-secondary">Back</button>
|
||||
<button type="button" data-bind="click: back" class="btn btn-secondary">Back</button>
|
||||
</div>
|
||||
|
||||
<div id="sync-from-server-section">
|
||||
<form id="sync-from-server-form">
|
||||
<div data-bind="visible: step() == 'sync-from-server'">
|
||||
<form data-bind="submit: finish">
|
||||
|
||||
<h2><%= t("setup_sync-from-server.heading") %></h2>
|
||||
|
||||
@@ -129,27 +129,27 @@
|
||||
|
||||
<div class="form-group">
|
||||
<label for="sync-server-host"><%= t("setup_sync-from-server.server-host") %></label>
|
||||
<input type="text" id="sync-server-host" class="form-control" placeholder="<%= t("setup_sync-from-server.server-host-placeholder") %>">
|
||||
<input type="text" id="syncServerHost" class="form-control" data-bind="value: syncServerHost" placeholder="<%= t("setup_sync-from-server.server-host-placeholder") %>">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="sync-proxy"><%= t("setup_sync-from-server.proxy-server") %></label>
|
||||
<input type="text" id="sync-proxy" class="form-control" placeholder="<%= t("setup_sync-from-server.proxy-server-placeholder") %>">
|
||||
<input type="text" id="sync-proxy" class="form-control" data-bind="value: syncProxy" placeholder="<%= t("setup_sync-from-server.proxy-server-placeholder") %>">
|
||||
|
||||
<p><strong><%= t("setup_sync-from-server.note") %></strong> <%= t("setup_sync-from-server.proxy-instruction") %></p>
|
||||
</div>
|
||||
<div class="form-group" style="margin-bottom: 8px;">
|
||||
<label for="password"><%= t("setup_sync-from-server.password") %></label>
|
||||
<input type="password" id="password" class="form-control" placeholder="<%= t("setup_sync-from-server.password-placeholder") %>">
|
||||
<input type="password" id="password" class="form-control" data-bind="value: password" placeholder="<%= t("setup_sync-from-server.password-placeholder") %>">
|
||||
</div>
|
||||
|
||||
<button type="button" data-action="back" class="btn btn-secondary"><%= t("setup_sync-from-server.back") %></button>
|
||||
<button type="button" data-bind="click: back" class="btn btn-secondary"><%= t("setup_sync-from-server.back") %></button>
|
||||
|
||||
<button type="submit" class="btn btn-primary"><%= t("setup_sync-from-server.finish-setup") %></button>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
||||
<div id="sync-in-progress-section">
|
||||
<div data-bind="visible: step() == 'sync-in-progress'">
|
||||
<h2><%= t("setup_sync-in-progress.heading") %></h2>
|
||||
|
||||
<div class="alert alert-success"><%= t("setup_sync-in-progress.successful") %></div>
|
||||
|
||||
@@ -31,9 +31,22 @@ export default class Becca {
|
||||
|
||||
allNoteSetCache: NoteSet | null;
|
||||
|
||||
/**
|
||||
* Pre-built parallel arrays for fast flat text scanning in search.
|
||||
* Avoids per-note property access overhead when iterating 50K+ notes.
|
||||
* Supports incremental updates: when individual notes change, only their
|
||||
* entries are rebuilt rather than the entire index.
|
||||
*/
|
||||
flatTextIndex: { notes: BNote[], flatTexts: string[], noteIdToIdx: Map<string, number> } | null;
|
||||
|
||||
/** NoteIds whose flat text needs to be recomputed in the index. */
|
||||
dirtyFlatTextNoteIds: Set<string>;
|
||||
|
||||
constructor() {
|
||||
this.reset();
|
||||
this.dirtyFlatTextNoteIds = new Set();
|
||||
this.allNoteSetCache = null;
|
||||
this.flatTextIndex = null;
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset() {
|
||||
@@ -239,6 +252,59 @@ export default class Becca {
|
||||
/** Should be called when the set of all non-skeleton notes changes (added/removed) */
|
||||
dirtyNoteSetCache() {
|
||||
this.allNoteSetCache = null;
|
||||
// Full rebuild needed since the note set itself changed
|
||||
this.flatTextIndex = null;
|
||||
this.dirtyFlatTextNoteIds.clear();
|
||||
}
|
||||
|
||||
/** Mark a single note's flat text as needing recomputation in the index. */
|
||||
dirtyNoteFlatText(noteId: string) {
|
||||
if (this.flatTextIndex) {
|
||||
// Index exists — schedule an incremental update
|
||||
this.dirtyFlatTextNoteIds.add(noteId);
|
||||
}
|
||||
// If flatTextIndex is null, full rebuild will happen on next access anyway
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns pre-built parallel arrays of notes and their flat texts for fast scanning.
|
||||
* The flat texts are already normalized (lowercase, diacritics removed).
|
||||
* Supports incremental updates: when individual notes are dirtied, only their
|
||||
* entries are recomputed rather than rebuilding the entire index.
|
||||
*/
|
||||
getFlatTextIndex(): { notes: BNote[], flatTexts: string[], noteIdToIdx: Map<string, number> } {
|
||||
if (!this.flatTextIndex) {
|
||||
const allNoteSet = this.getAllNoteSet();
|
||||
const notes: BNote[] = [];
|
||||
const flatTexts: string[] = [];
|
||||
const noteIdToIdx = new Map<string, number>();
|
||||
|
||||
for (const note of allNoteSet.notes) {
|
||||
noteIdToIdx.set(note.noteId, notes.length);
|
||||
notes.push(note);
|
||||
flatTexts.push(note.getFlatText());
|
||||
}
|
||||
|
||||
this.flatTextIndex = { notes, flatTexts, noteIdToIdx };
|
||||
this.dirtyFlatTextNoteIds.clear();
|
||||
} else if (this.dirtyFlatTextNoteIds.size > 0) {
|
||||
// Incremental update: only recompute flat texts for dirtied notes
|
||||
const { flatTexts, noteIdToIdx } = this.flatTextIndex;
|
||||
|
||||
for (const noteId of this.dirtyFlatTextNoteIds) {
|
||||
const idx = noteIdToIdx.get(noteId);
|
||||
if (idx !== undefined) {
|
||||
const note = this.notes[noteId];
|
||||
if (note) {
|
||||
flatTexts[idx] = note.getFlatText();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.dirtyFlatTextNoteIds.clear();
|
||||
}
|
||||
|
||||
return this.flatTextIndex;
|
||||
}
|
||||
|
||||
getAllNoteSet() {
|
||||
|
||||
@@ -6,6 +6,7 @@ import dateUtils from "../../services/date_utils.js";
|
||||
import promotedAttributeDefinitionParser from "../../services/promoted_attribute_definition_parser.js";
|
||||
import sanitizeAttributeName from "../../services/sanitize_attribute_name.js";
|
||||
import type { AttributeRow, AttributeType } from "@triliumnext/commons";
|
||||
import { normalize } from "../../services/utils.js";
|
||||
|
||||
interface SavingOpts {
|
||||
skipValidation?: boolean;
|
||||
@@ -34,6 +35,11 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
|
||||
value!: string;
|
||||
isInheritable!: boolean;
|
||||
|
||||
/** Pre-normalized (lowercase, diacritics removed) name for search. */
|
||||
normalizedName!: string;
|
||||
/** Pre-normalized (lowercase, diacritics removed) value for search. */
|
||||
normalizedValue!: string;
|
||||
|
||||
constructor(row?: AttributeRow) {
|
||||
super();
|
||||
|
||||
@@ -59,6 +65,10 @@ class BAttribute extends AbstractBeccaEntity<BAttribute> {
|
||||
this.isInheritable = !!isInheritable;
|
||||
this.utcDateModified = utcDateModified;
|
||||
|
||||
// Pre-compute normalized forms for search (avoids repeated normalize() calls in hot loops)
|
||||
this.normalizedName = normalize(this.name);
|
||||
this.normalizedValue = normalize(this.value);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@@ -790,6 +790,9 @@ class BNote extends AbstractBeccaEntity<BNote> {
|
||||
this.__attributeCache = null;
|
||||
this.__inheritableAttributeCache = null;
|
||||
this.__ancestorCache = null;
|
||||
|
||||
// Mark only this note's flat text as dirty for incremental index update
|
||||
this.becca.dirtyNoteFlatText(this.noteId);
|
||||
}
|
||||
|
||||
invalidateSubTree(path: string[] = []) {
|
||||
|
||||
2
apps/server/src/express.d.ts
vendored
2
apps/server/src/express.d.ts
vendored
@@ -26,7 +26,5 @@ export declare module "express-session" {
|
||||
totpEnabled: boolean;
|
||||
ssoEnabled: boolean;
|
||||
};
|
||||
/** Set during /bootstrap to mark the session as modified so express-session persists it and sends the cookie. */
|
||||
csrfInitialized?: true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,27 +6,6 @@
|
||||
|
||||
// Migrations should be kept in descending order, so the latest migration is first.
|
||||
const MIGRATIONS: (SqlMigration | JsMigration)[] = [
|
||||
// Add missing database indices for query performance
|
||||
{
|
||||
version: 235,
|
||||
sql: /*sql*/`
|
||||
CREATE INDEX IF NOT EXISTS IDX_entity_changes_isSynced_id
|
||||
ON entity_changes (isSynced, id);
|
||||
CREATE INDEX IF NOT EXISTS IDX_entity_changes_isErased_entityName
|
||||
ON entity_changes (isErased, entityName);
|
||||
CREATE INDEX IF NOT EXISTS IDX_notes_isDeleted_utcDateModified
|
||||
ON notes (isDeleted, utcDateModified);
|
||||
CREATE INDEX IF NOT EXISTS IDX_branches_isDeleted_utcDateModified
|
||||
ON branches (isDeleted, utcDateModified);
|
||||
CREATE INDEX IF NOT EXISTS IDX_attributes_isDeleted_utcDateModified
|
||||
ON attributes (isDeleted, utcDateModified);
|
||||
CREATE INDEX IF NOT EXISTS IDX_attachments_isDeleted_utcDateModified
|
||||
ON attachments (isDeleted, utcDateModified);
|
||||
DROP INDEX IF EXISTS IDX_branches_parentNoteId;
|
||||
CREATE INDEX IF NOT EXISTS IDX_branches_parentNoteId_isDeleted_notePosition
|
||||
ON branches (parentNoteId, isDeleted, notePosition);
|
||||
`
|
||||
},
|
||||
// Migrate aiChat notes to code notes since LLM integration has been removed
|
||||
{
|
||||
version: 234,
|
||||
|
||||
@@ -97,6 +97,7 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([
|
||||
"layoutOrientation",
|
||||
"backgroundEffects",
|
||||
"allowedHtmlTags",
|
||||
"searchEnableFuzzyMatching",
|
||||
"redirectBareDomain",
|
||||
"showLoginInShareTheme",
|
||||
"splitEditorOrientation",
|
||||
|
||||
@@ -1,17 +1,7 @@
|
||||
import crypto from "crypto";
|
||||
import { doubleCsrf } from "csrf-csrf";
|
||||
|
||||
import sessionSecret from "../services/session_secret.js";
|
||||
import { isElectron } from "../services/utils.js";
|
||||
|
||||
export const CSRF_COOKIE_NAME = "trilium-csrf";
|
||||
|
||||
// In Electron, API calls go through an IPC bypass (routes/electron.ts) that uses a
|
||||
// FakeRequest with a static session ID, while the bootstrap request goes through real
|
||||
// Express with a real session. This mismatch causes CSRF validation to always fail.
|
||||
// We use a per-instance random identifier so each Electron process still gets unique tokens.
|
||||
const electronSessionId = crypto.randomUUID();
|
||||
|
||||
const doubleCsrfUtilities = doubleCsrf({
|
||||
getSecret: () => sessionSecret,
|
||||
cookieOptions: {
|
||||
@@ -20,8 +10,7 @@ const doubleCsrfUtilities = doubleCsrf({
|
||||
sameSite: "strict",
|
||||
httpOnly: !isElectron // set to false for Electron, see https://github.com/TriliumNext/Trilium/pull/966
|
||||
},
|
||||
cookieName: CSRF_COOKIE_NAME,
|
||||
getSessionIdentifier: (req) => isElectron ? electronSessionId : req.session.id
|
||||
cookieName: "_csrf"
|
||||
});
|
||||
|
||||
export const { generateCsrfToken, doubleCsrfProtection } = doubleCsrfUtilities;
|
||||
export const { generateToken, doubleCsrfProtection } = doubleCsrfUtilities;
|
||||
|
||||
@@ -3,7 +3,6 @@ import log from "../services/log.js";
|
||||
import NotFoundError from "../errors/not_found_error.js";
|
||||
import ForbiddenError from "../errors/forbidden_error.js";
|
||||
import HttpError from "../errors/http_error.js";
|
||||
import { CSRF_COOKIE_NAME } from "./csrf_protection.js";
|
||||
|
||||
function register(app: Application) {
|
||||
|
||||
@@ -15,10 +14,7 @@ function register(app: Application) {
|
||||
&& err.code === "EBADCSRFTOKEN";
|
||||
|
||||
if (isCsrfTokenError) {
|
||||
const csrfHeader = req.headers["x-csrf-token"];
|
||||
const csrfHeaderPrefix = typeof csrfHeader === "string" ? csrfHeader.slice(0, 8) : undefined;
|
||||
const tokenInfo = csrfHeaderPrefix ? ` (token prefix: ${csrfHeaderPrefix})` : "";
|
||||
log.error(`Invalid CSRF token on ${req.method} ${req.url}${tokenInfo}`);
|
||||
log.error(`Invalid CSRF token: ${req.headers["x-csrf-token"]}, secret: ${req.cookies["_csrf"]}`);
|
||||
return next(new ForbiddenError("Invalid CSRF token"));
|
||||
}
|
||||
|
||||
|
||||
@@ -11,28 +11,19 @@ import { generateCss, generateIconRegistry, getIconPacks, MIME_TO_EXTENSION_MAPP
|
||||
import log from "../services/log.js";
|
||||
import optionService from "../services/options.js";
|
||||
import protectedSessionService from "../services/protected_session.js";
|
||||
import { generateCsrfToken } from "./csrf_protection.js";
|
||||
import sql from "../services/sql.js";
|
||||
import { isDev, isElectron, isMac, isWindows11 } from "../services/utils.js";
|
||||
import { generateToken as generateCsrfToken } from "./csrf_protection.js";
|
||||
|
||||
|
||||
type View = "desktop" | "mobile" | "print";
|
||||
|
||||
export function bootstrap(req: Request, res: Response) {
|
||||
const options = optionService.getOptionMap();
|
||||
|
||||
// csrf-csrf v4 binds CSRF tokens to the session ID via HMAC. With saveUninitialized: false,
|
||||
// a brand-new session is never persisted unless explicitly modified, so its cookie is never
|
||||
// sent to the browser — meaning every request gets a different ephemeral session ID, and
|
||||
// CSRF validation fails. Setting this flag marks the session as modified, which causes
|
||||
// express-session to persist it and send the session cookie in this response.
|
||||
if (!req.session.csrfInitialized) {
|
||||
req.session.csrfInitialized = true;
|
||||
}
|
||||
|
||||
const csrfToken = generateCsrfToken(req, res, {
|
||||
overwrite: false,
|
||||
validateOnReuse: false // if validation fails, generate a new token instead of throwing an error
|
||||
});
|
||||
//'overwrite' set to false (default) => the existing token will be re-used and validated
|
||||
//'validateOnReuse' set to false => if validation fails, generate a new token instead of throwing an error
|
||||
const csrfToken = generateCsrfToken(req, res, false, false);
|
||||
log.info(`CSRF token generation: ${csrfToken ? "Successful" : "Failed"}`);
|
||||
|
||||
const view = getView(req);
|
||||
|
||||
@@ -5,7 +5,7 @@ import packageJson from "../../package.json" with { type: "json" };
|
||||
import build from "./build.js";
|
||||
import dataDir from "./data_dir.js";
|
||||
|
||||
const APP_DB_VERSION = 235;
|
||||
const APP_DB_VERSION = 234;
|
||||
const SYNC_VERSION = 37;
|
||||
const CLIPPER_PROTOCOL_VERSION = "1.0";
|
||||
|
||||
|
||||
@@ -198,6 +198,9 @@ const defaultOptions: DefaultOption[] = [
|
||||
isSynced: true
|
||||
},
|
||||
|
||||
// Search settings
|
||||
{ name: "searchEnableFuzzyMatching", value: "true", isSynced: true },
|
||||
|
||||
// Share settings
|
||||
{ name: "redirectBareDomain", value: "false", isSynced: true },
|
||||
{ name: "showLoginInShareTheme", value: "false", isSynced: true },
|
||||
|
||||
@@ -7,7 +7,7 @@ function parse(value: string): DefinitionObject {
|
||||
for (const token of tokens) {
|
||||
if (token === "promoted") {
|
||||
defObj.isPromoted = true;
|
||||
} else if (["text", "textarea", "number", "boolean", "date", "datetime", "time", "url"].includes(token)) {
|
||||
} else if (["text", "number", "boolean", "date", "datetime", "time", "url"].includes(token)) {
|
||||
defObj.labelType = token;
|
||||
} else if (["single", "multi"].includes(token)) {
|
||||
defObj.multiplicity = token;
|
||||
|
||||
@@ -7,7 +7,7 @@ import Expression from "./expression.js";
|
||||
import NoteSet from "../note_set.js";
|
||||
import becca from "../../../becca/becca.js";
|
||||
import { normalize } from "../../utils.js";
|
||||
import { normalizeSearchText, fuzzyMatchWord, fuzzyMatchWordWithResult } from "../utils/text_utils.js";
|
||||
import { normalizeSearchText, fuzzyMatchWordWithResult } from "../utils/text_utils.js";
|
||||
import beccaService from "../../../becca/becca_service.js";
|
||||
|
||||
class NoteFlatTextExp extends Expression {
|
||||
@@ -23,6 +23,18 @@ class NoteFlatTextExp extends Expression {
|
||||
execute(inputNoteSet: NoteSet, executionContext: any, searchContext: SearchContext) {
|
||||
const resultNoteSet = new NoteSet();
|
||||
|
||||
// Cache normalized titles to avoid redundant normalize+getNoteTitle calls
|
||||
const titleCache = new Map<string, string>();
|
||||
const getNormalizedTitle = (noteId: string, parentNoteId: string): string => {
|
||||
const key = `${noteId}-${parentNoteId}`;
|
||||
let cached = titleCache.get(key);
|
||||
if (cached === undefined) {
|
||||
cached = normalizeSearchText(beccaService.getNoteTitle(noteId, parentNoteId));
|
||||
titleCache.set(key, cached);
|
||||
}
|
||||
return cached;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param note
|
||||
* @param remainingTokens - tokens still needed to be found in the path towards root
|
||||
@@ -38,10 +50,8 @@ class NoteFlatTextExp extends Expression {
|
||||
const noteId = resultPath[resultPath.length - 1];
|
||||
|
||||
if (!resultNoteSet.hasNoteId(noteId)) {
|
||||
// we could get here from multiple paths, the first one wins because the paths
|
||||
// are sorted by importance
|
||||
// Snapshot takenPath since it's mutable
|
||||
executionContext.noteIdToNotePath[noteId] = resultPath;
|
||||
|
||||
resultNoteSet.add(becca.notes[noteId]);
|
||||
}
|
||||
}
|
||||
@@ -50,45 +60,40 @@ class NoteFlatTextExp extends Expression {
|
||||
}
|
||||
|
||||
if (note.parents.length === 0 || note.noteId === "root") {
|
||||
// we've reached root, but there are still remaining tokens -> this candidate note produced no result
|
||||
return;
|
||||
}
|
||||
|
||||
const foundAttrTokens: string[] = [];
|
||||
|
||||
for (const token of remainingTokens) {
|
||||
// Add defensive checks for undefined properties
|
||||
const typeMatches = note.type && note.type.includes(token);
|
||||
const mimeMatches = note.mime && note.mime.includes(token);
|
||||
|
||||
if (typeMatches || mimeMatches) {
|
||||
if ((note.type && note.type.includes(token)) ||
|
||||
(note.mime && note.mime.includes(token))) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
}
|
||||
|
||||
for (const attribute of note.getOwnedAttributes()) {
|
||||
const normalizedName = normalizeSearchText(attribute.name);
|
||||
const normalizedValue = normalizeSearchText(attribute.value);
|
||||
|
||||
for (const token of remainingTokens) {
|
||||
if (normalizedName.includes(token) || normalizedValue.includes(token)) {
|
||||
if (attribute.normalizedName.includes(token) || attribute.normalizedValue.includes(token)) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const parentNote of note.parents) {
|
||||
const title = normalizeSearchText(beccaService.getNoteTitle(note.noteId, parentNote.noteId));
|
||||
const foundTokens: string[] = foundAttrTokens.slice();
|
||||
const title = getNormalizedTitle(note.noteId, parentNote.noteId);
|
||||
|
||||
// Use Set for O(1) lookup instead of Array.includes() which is O(n)
|
||||
const foundTokenSet = new Set<string>(foundAttrTokens);
|
||||
|
||||
for (const token of remainingTokens) {
|
||||
if (this.smartMatch(title, token, searchContext)) {
|
||||
foundTokens.push(token);
|
||||
foundTokenSet.add(token);
|
||||
}
|
||||
}
|
||||
|
||||
if (foundTokens.length > 0) {
|
||||
const newRemainingTokens = remainingTokens.filter((token) => !foundTokens.includes(token));
|
||||
if (foundTokenSet.size > 0) {
|
||||
const newRemainingTokens = remainingTokens.filter((token) => !foundTokenSet.has(token));
|
||||
|
||||
searchPathTowardsRoot(parentNote, newRemainingTokens, [note.noteId, ...takenPath]);
|
||||
} else {
|
||||
@@ -99,6 +104,22 @@ class NoteFlatTextExp extends Expression {
|
||||
|
||||
const candidateNotes = this.getCandidateNotes(inputNoteSet, searchContext);
|
||||
|
||||
// Fast path for single-token autocomplete searches:
|
||||
// Skip the expensive recursive parent walk and just use getBestNotePath().
|
||||
// The flat text already matched, so we know the token is present.
|
||||
if (this.tokens.length === 1 && searchContext.autocomplete) {
|
||||
for (const note of candidateNotes) {
|
||||
if (!resultNoteSet.hasNoteId(note.noteId)) {
|
||||
const notePath = note.getBestNotePath();
|
||||
if (notePath) {
|
||||
executionContext.noteIdToNotePath[note.noteId] = notePath;
|
||||
resultNoteSet.add(note);
|
||||
}
|
||||
}
|
||||
}
|
||||
return resultNoteSet;
|
||||
}
|
||||
|
||||
for (const note of candidateNotes) {
|
||||
// autocomplete should be able to find notes by their noteIds as well (only leafs)
|
||||
if (this.tokens.length === 1 && note.noteId.toLowerCase() === this.tokens[0]) {
|
||||
@@ -112,13 +133,13 @@ class NoteFlatTextExp extends Expression {
|
||||
// Add defensive checks for undefined properties
|
||||
const typeMatches = note.type && note.type.includes(token);
|
||||
const mimeMatches = note.mime && note.mime.includes(token);
|
||||
|
||||
|
||||
if (typeMatches || mimeMatches) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
|
||||
for (const attribute of note.ownedAttributes) {
|
||||
if (normalizeSearchText(attribute.name).includes(token) || normalizeSearchText(attribute.value).includes(token)) {
|
||||
if (attribute.normalizedName.includes(token) || attribute.normalizedValue.includes(token)) {
|
||||
foundAttrTokens.push(token);
|
||||
}
|
||||
}
|
||||
@@ -165,10 +186,25 @@ class NoteFlatTextExp extends Expression {
|
||||
getCandidateNotes(noteSet: NoteSet, searchContext?: SearchContext): BNote[] {
|
||||
const candidateNotes: BNote[] = [];
|
||||
|
||||
for (const note of noteSet.notes) {
|
||||
const normalizedFlatText = normalizeSearchText(note.getFlatText());
|
||||
// Use the pre-built flat text index for fast scanning.
|
||||
// This provides pre-computed flat texts in a parallel array, avoiding
|
||||
// per-note property access overhead at large scale (50K+ notes).
|
||||
const { notes: indexNotes, flatTexts } = becca.getFlatTextIndex();
|
||||
|
||||
// Build a set for quick membership check when noteSet isn't the full set
|
||||
const isFullSet = noteSet.notes.length === indexNotes.length;
|
||||
|
||||
for (let i = 0; i < indexNotes.length; i++) {
|
||||
const note = indexNotes[i];
|
||||
|
||||
// Skip notes not in the input set (only check when not using the full set)
|
||||
if (!isFullSet && !noteSet.hasNoteId(note.noteId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const flatText = flatTexts[i];
|
||||
for (const token of this.tokens) {
|
||||
if (this.smartMatch(normalizedFlatText, token, searchContext)) {
|
||||
if (this.smartMatch(flatText, token, searchContext)) {
|
||||
candidateNotes.push(note);
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"use strict";
|
||||
|
||||
import hoistedNoteService from "../hoisted_note.js";
|
||||
import optionService from "../options.js";
|
||||
import type { SearchParams } from "./services/types.js";
|
||||
|
||||
class SearchContext {
|
||||
@@ -18,6 +19,8 @@ class SearchContext {
|
||||
debug?: boolean;
|
||||
debugInfo: {} | null;
|
||||
fuzzyAttributeSearch: boolean;
|
||||
/** When true, skip the two-phase fuzzy fallback and use the single-token fast path. */
|
||||
autocomplete: boolean;
|
||||
enableFuzzyMatching: boolean; // Controls whether fuzzy matching is enabled for this search phase
|
||||
highlightedTokens: string[];
|
||||
originalQuery: string;
|
||||
@@ -46,7 +49,12 @@ class SearchContext {
|
||||
this.debug = params.debug;
|
||||
this.debugInfo = null;
|
||||
this.fuzzyAttributeSearch = !!params.fuzzyAttributeSearch;
|
||||
this.enableFuzzyMatching = true; // Default to true for backward compatibility
|
||||
this.autocomplete = !!params.autocomplete;
|
||||
try {
|
||||
this.enableFuzzyMatching = optionService.getOptionBool("searchEnableFuzzyMatching");
|
||||
} catch {
|
||||
this.enableFuzzyMatching = true; // Default to true if option not yet initialized
|
||||
}
|
||||
this.highlightedTokens = [];
|
||||
this.originalQuery = "";
|
||||
this.fulltextQuery = ""; // complete fulltext part
|
||||
|
||||
@@ -59,8 +59,9 @@ class SearchResult {
|
||||
this.fuzzyScore = 0; // Reset fuzzy score tracking
|
||||
|
||||
const note = becca.notes[this.noteId];
|
||||
const normalizedQuery = normalizeSearchText(fulltextQuery.toLowerCase());
|
||||
const normalizedTitle = normalizeSearchText(note.title.toLowerCase());
|
||||
// normalizeSearchText already lowercases — no need for .toLowerCase() first
|
||||
const normalizedQuery = normalizeSearchText(fulltextQuery);
|
||||
const normalizedTitle = normalizeSearchText(note.title);
|
||||
|
||||
// Note ID exact match, much higher score
|
||||
if (note.noteId.toLowerCase() === fulltextQuery) {
|
||||
@@ -91,35 +92,37 @@ class SearchResult {
|
||||
}
|
||||
|
||||
addScoreForStrings(tokens: string[], str: string, factor: number, enableFuzzyMatching: boolean = true) {
|
||||
const normalizedStr = normalizeSearchText(str.toLowerCase());
|
||||
// normalizeSearchText already lowercases — no need for .toLowerCase() first
|
||||
const normalizedStr = normalizeSearchText(str);
|
||||
const chunks = normalizedStr.split(" ");
|
||||
|
||||
// Pre-normalize tokens once instead of per-chunk
|
||||
const normalizedTokens = tokens.map(t => normalizeSearchText(t));
|
||||
|
||||
let tokenScore = 0;
|
||||
for (const chunk of chunks) {
|
||||
for (const token of tokens) {
|
||||
const normalizedToken = normalizeSearchText(token.toLowerCase());
|
||||
|
||||
for (let ti = 0; ti < normalizedTokens.length; ti++) {
|
||||
const normalizedToken = normalizedTokens[ti];
|
||||
|
||||
if (chunk === normalizedToken) {
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_EXACT_MATCH * token.length * factor;
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_EXACT_MATCH * tokens[ti].length * factor;
|
||||
} else if (chunk.startsWith(normalizedToken)) {
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_PREFIX_MATCH * token.length * factor;
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_PREFIX_MATCH * tokens[ti].length * factor;
|
||||
} else if (chunk.includes(normalizedToken)) {
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_CONTAINS_MATCH * token.length * factor;
|
||||
} else {
|
||||
// Try fuzzy matching for individual tokens with caps applied
|
||||
tokenScore += SCORE_WEIGHTS.TOKEN_CONTAINS_MATCH * tokens[ti].length * factor;
|
||||
} else if (enableFuzzyMatching &&
|
||||
normalizedToken.length >= FUZZY_SEARCH_CONFIG.MIN_FUZZY_TOKEN_LENGTH &&
|
||||
this.fuzzyScore < SCORE_WEIGHTS.MAX_TOTAL_FUZZY_SCORE) {
|
||||
// Only compute edit distance when fuzzy matching is enabled
|
||||
const editDistance = calculateOptimizedEditDistance(chunk, normalizedToken, FUZZY_SEARCH_CONFIG.MAX_EDIT_DISTANCE);
|
||||
if (editDistance <= FUZZY_SEARCH_CONFIG.MAX_EDIT_DISTANCE &&
|
||||
normalizedToken.length >= FUZZY_SEARCH_CONFIG.MIN_FUZZY_TOKEN_LENGTH &&
|
||||
this.fuzzyScore < SCORE_WEIGHTS.MAX_TOTAL_FUZZY_SCORE) {
|
||||
|
||||
if (editDistance <= FUZZY_SEARCH_CONFIG.MAX_EDIT_DISTANCE) {
|
||||
const fuzzyWeight = SCORE_WEIGHTS.TOKEN_FUZZY_MATCH * (1 - editDistance / FUZZY_SEARCH_CONFIG.MAX_EDIT_DISTANCE);
|
||||
// Apply caps: limit token length multiplier and per-token contribution
|
||||
const cappedTokenLength = Math.min(token.length, SCORE_WEIGHTS.MAX_FUZZY_TOKEN_LENGTH_MULTIPLIER);
|
||||
const cappedTokenLength = Math.min(tokens[ti].length, SCORE_WEIGHTS.MAX_FUZZY_TOKEN_LENGTH_MULTIPLIER);
|
||||
const fuzzyTokenScore = Math.min(
|
||||
fuzzyWeight * cappedTokenLength * factor,
|
||||
SCORE_WEIGHTS.MAX_FUZZY_SCORE_PER_TOKEN
|
||||
);
|
||||
|
||||
|
||||
tokenScore += fuzzyTokenScore;
|
||||
this.fuzzyScore += fuzzyTokenScore;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"use strict";
|
||||
|
||||
import normalizeString from "normalize-strings";
|
||||
import lex from "./lex.js";
|
||||
import handleParens from "./handle_parens.js";
|
||||
import parse from "./parse.js";
|
||||
@@ -8,7 +7,7 @@ import SearchResult from "../search_result.js";
|
||||
import SearchContext from "../search_context.js";
|
||||
import becca from "../../../becca/becca.js";
|
||||
import beccaService from "../../../becca/becca_service.js";
|
||||
import { normalize, escapeHtml, escapeRegExp } from "../../utils.js";
|
||||
import { normalize, removeDiacritic, escapeHtml, escapeRegExp } from "../../utils.js";
|
||||
import log from "../../log.js";
|
||||
import hoistedNoteService from "../../hoisted_note.js";
|
||||
import type BNote from "../../../becca/entities/bnote.js";
|
||||
@@ -17,7 +16,6 @@ import type { SearchParams, TokenStructure } from "./types.js";
|
||||
import type Expression from "../expressions/expression.js";
|
||||
import sql from "../../sql.js";
|
||||
import scriptService from "../../script.js";
|
||||
import striptags from "striptags";
|
||||
import protectedSessionService from "../../protected_session.js";
|
||||
|
||||
export interface SearchNoteResult {
|
||||
@@ -250,23 +248,30 @@ function findResultsWithExpression(expression: Expression, searchContext: Search
|
||||
return performSearch(expression, searchContext, false);
|
||||
}
|
||||
|
||||
// For autocomplete searches, skip the expensive two-phase fuzzy fallback.
|
||||
// The user is typing and will refine their query — exact matching is
|
||||
// sufficient and avoids a second full scan of all notes.
|
||||
if (searchContext.autocomplete) {
|
||||
return performSearch(expression, searchContext, false);
|
||||
}
|
||||
|
||||
// Phase 1: Try exact matches first (without fuzzy matching)
|
||||
const exactResults = performSearch(expression, searchContext, false);
|
||||
|
||||
|
||||
// Check if we have sufficient high-quality results
|
||||
const minResultThreshold = 5;
|
||||
const minScoreForQuality = 10; // Minimum score to consider a result "high quality"
|
||||
|
||||
|
||||
const highQualityResults = exactResults.filter(result => result.score >= minScoreForQuality);
|
||||
|
||||
|
||||
// If we have enough high-quality exact matches, return them
|
||||
if (highQualityResults.length >= minResultThreshold) {
|
||||
return exactResults;
|
||||
}
|
||||
|
||||
|
||||
// Phase 2: Add fuzzy matching as fallback when exact matches are insufficient
|
||||
const fuzzyResults = performSearch(expression, searchContext, true);
|
||||
|
||||
|
||||
// Merge results, ensuring exact matches always rank higher than fuzzy matches
|
||||
return mergeExactAndFuzzyResults(exactResults, fuzzyResults);
|
||||
}
|
||||
@@ -448,7 +453,7 @@ function extractContentSnippet(noteId: string, searchTokens: string[], maxLength
|
||||
|
||||
try {
|
||||
let content = note.getContent();
|
||||
|
||||
|
||||
if (!content || typeof content !== "string") {
|
||||
return "";
|
||||
}
|
||||
@@ -464,77 +469,66 @@ function extractContentSnippet(noteId: string, searchTokens: string[], maxLength
|
||||
return ""; // Protected but no session available
|
||||
}
|
||||
|
||||
// Strip HTML tags for text notes
|
||||
// Strip HTML tags for text notes — use fast regex for snippet extraction
|
||||
// (striptags library is ~18x slower and not needed for search snippets)
|
||||
if (note.type === "text") {
|
||||
content = striptags(content);
|
||||
content = content.replace(/<[^>]*>/g, "");
|
||||
}
|
||||
|
||||
// Normalize whitespace while preserving paragraph breaks
|
||||
// First, normalize multiple newlines to double newlines (paragraph breaks)
|
||||
content = content.replace(/\n\s*\n/g, "\n\n");
|
||||
// Then normalize spaces within lines
|
||||
content = content.split('\n').map(line => line.replace(/\s+/g, " ").trim()).join('\n');
|
||||
// Finally trim the whole content
|
||||
content = content.trim();
|
||||
|
||||
if (!content) {
|
||||
return "";
|
||||
}
|
||||
|
||||
// Try to find a snippet around the first matching token
|
||||
const normalizedContent = normalizeString(content.toLowerCase());
|
||||
// Find match position using normalize on the raw stripped content.
|
||||
// We use a single normalize() pass — no need for expensive whitespace
|
||||
// normalization just to find the match index.
|
||||
const normalizedContent = normalize(content);
|
||||
const normalizedTokens = searchTokens.map(token => normalize(token));
|
||||
let snippetStart = 0;
|
||||
let matchFound = false;
|
||||
|
||||
for (const token of searchTokens) {
|
||||
const normalizedToken = normalizeString(token.toLowerCase());
|
||||
for (const normalizedToken of normalizedTokens) {
|
||||
const matchIndex = normalizedContent.indexOf(normalizedToken);
|
||||
|
||||
|
||||
if (matchIndex !== -1) {
|
||||
// Center the snippet around the match
|
||||
snippetStart = Math.max(0, matchIndex - maxLength / 2);
|
||||
matchFound = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract snippet
|
||||
let snippet = content.substring(snippetStart, snippetStart + maxLength);
|
||||
// Extract a snippet region from the raw content, then clean only that
|
||||
const snippetRegion = content.substring(snippetStart, snippetStart + maxLength + 100);
|
||||
|
||||
// If snippet contains linebreaks, limit to max 4 lines and override character limit
|
||||
// Normalize whitespace only on the small snippet region
|
||||
let snippet = snippetRegion
|
||||
.replace(/\n\s*\n/g, "\n\n")
|
||||
.replace(/[ \t]+/g, " ")
|
||||
.trim()
|
||||
.substring(0, maxLength);
|
||||
|
||||
// If snippet contains linebreaks, limit to max 4 lines
|
||||
const lines = snippet.split('\n');
|
||||
if (lines.length > 4) {
|
||||
// Find which lines contain the search tokens to ensure they're included
|
||||
const normalizedLines = lines.map(line => normalizeString(line.toLowerCase()));
|
||||
const normalizedTokens = searchTokens.map(token => normalizeString(token.toLowerCase()));
|
||||
|
||||
// Find the first line that contains a search token
|
||||
let firstMatchLine = -1;
|
||||
for (let i = 0; i < normalizedLines.length; i++) {
|
||||
if (normalizedTokens.some(token => normalizedLines[i].includes(token))) {
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const normalizedLine = normalize(lines[i]);
|
||||
if (normalizedTokens.some(token => normalizedLine.includes(token))) {
|
||||
firstMatchLine = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (firstMatchLine !== -1) {
|
||||
// Center the 4-line window around the first match
|
||||
// Try to show 1 line before and 2 lines after the match
|
||||
const startLine = Math.max(0, firstMatchLine - 1);
|
||||
const endLine = Math.min(lines.length, startLine + 4);
|
||||
snippet = lines.slice(startLine, endLine).join('\n');
|
||||
} else {
|
||||
// No match found in lines (shouldn't happen), just take first 4
|
||||
snippet = lines.slice(0, 4).join('\n');
|
||||
}
|
||||
// Add ellipsis if we truncated lines
|
||||
snippet = snippet + "...";
|
||||
} else if (lines.length > 1) {
|
||||
// For multi-line snippets that are 4 or fewer lines, keep them as-is
|
||||
// No need to truncate
|
||||
} else {
|
||||
// Single line content - apply original word boundary logic
|
||||
// Try to start/end at word boundaries
|
||||
} else if (lines.length <= 1) {
|
||||
// Single line content - apply word boundary logic
|
||||
if (snippetStart > 0) {
|
||||
const firstSpace = snippet.search(/\s/);
|
||||
if (firstSpace > 0 && firstSpace < 20) {
|
||||
@@ -542,7 +536,7 @@ function extractContentSnippet(noteId: string, searchTokens: string[], maxLength
|
||||
}
|
||||
snippet = "..." + snippet;
|
||||
}
|
||||
|
||||
|
||||
if (snippetStart + maxLength < content.length) {
|
||||
const lastSpace = snippet.search(/\s[^\s]*$/);
|
||||
if (lastSpace > snippet.length - 20 && lastSpace > 0) {
|
||||
@@ -582,7 +576,7 @@ function extractAttributeSnippet(noteId: string, searchTokens: string[], maxLeng
|
||||
|
||||
// Check if any search token matches the attribute name or value
|
||||
const hasMatch = searchTokens.some(token => {
|
||||
const normalizedToken = normalizeString(token.toLowerCase());
|
||||
const normalizedToken = normalize(token);
|
||||
return attrName.includes(normalizedToken) || attrValue.includes(normalizedToken);
|
||||
});
|
||||
|
||||
@@ -650,7 +644,8 @@ function searchNotesForAutocomplete(query: string, fastSearch: boolean = true) {
|
||||
includeHiddenNotes: true,
|
||||
fuzzyAttributeSearch: true,
|
||||
ignoreInternalAttributes: true,
|
||||
ancestorNoteId: hoistedNoteService.isHoistedInHiddenSubtree() ? "root" : hoistedNoteService.getHoistedNoteId()
|
||||
ancestorNoteId: hoistedNoteService.isHoistedInHiddenSubtree() ? "root" : hoistedNoteService.getHoistedNoteId(),
|
||||
autocomplete: true
|
||||
});
|
||||
|
||||
const allSearchResults = findResultsWithQuery(query, searchContext);
|
||||
@@ -727,37 +722,40 @@ function highlightSearchResults(searchResults: SearchResult[], highlightedTokens
|
||||
}
|
||||
|
||||
for (const result of searchResults) {
|
||||
// Reset token
|
||||
const tokenRegex = new RegExp(escapeRegExp(token), "gi");
|
||||
let match;
|
||||
|
||||
// Highlight in note path title
|
||||
if (result.highlightedNotePathTitle) {
|
||||
const titleRegex = new RegExp(escapeRegExp(token), "gi");
|
||||
while ((match = titleRegex.exec(normalizeString(result.highlightedNotePathTitle))) !== null) {
|
||||
// Compute diacritic-free version ONCE before the loop, not on every iteration
|
||||
let titleNoDiacritics = removeDiacritic(result.highlightedNotePathTitle);
|
||||
while ((match = titleRegex.exec(titleNoDiacritics)) !== null) {
|
||||
result.highlightedNotePathTitle = wrapText(result.highlightedNotePathTitle, match.index, token.length, "{", "}");
|
||||
// 2 characters are added, so we need to adjust the index
|
||||
// 2 characters are added, so we need to adjust the index and re-derive
|
||||
titleRegex.lastIndex += 2;
|
||||
titleNoDiacritics = removeDiacritic(result.highlightedNotePathTitle);
|
||||
}
|
||||
}
|
||||
|
||||
// Highlight in content snippet
|
||||
if (result.highlightedContentSnippet) {
|
||||
const contentRegex = new RegExp(escapeRegExp(token), "gi");
|
||||
while ((match = contentRegex.exec(normalizeString(result.highlightedContentSnippet))) !== null) {
|
||||
let contentNoDiacritics = removeDiacritic(result.highlightedContentSnippet);
|
||||
while ((match = contentRegex.exec(contentNoDiacritics)) !== null) {
|
||||
result.highlightedContentSnippet = wrapText(result.highlightedContentSnippet, match.index, token.length, "{", "}");
|
||||
// 2 characters are added, so we need to adjust the index
|
||||
contentRegex.lastIndex += 2;
|
||||
contentNoDiacritics = removeDiacritic(result.highlightedContentSnippet);
|
||||
}
|
||||
}
|
||||
|
||||
// Highlight in attribute snippet
|
||||
if (result.highlightedAttributeSnippet) {
|
||||
const attributeRegex = new RegExp(escapeRegExp(token), "gi");
|
||||
while ((match = attributeRegex.exec(normalizeString(result.highlightedAttributeSnippet))) !== null) {
|
||||
let attrNoDiacritics = removeDiacritic(result.highlightedAttributeSnippet);
|
||||
while ((match = attributeRegex.exec(attrNoDiacritics)) !== null) {
|
||||
result.highlightedAttributeSnippet = wrapText(result.highlightedAttributeSnippet, match.index, token.length, "{", "}");
|
||||
// 2 characters are added, so we need to adjust the index
|
||||
attributeRegex.lastIndex += 2;
|
||||
attrNoDiacritics = removeDiacritic(result.highlightedAttributeSnippet);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,675 @@
|
||||
/**
|
||||
* Comprehensive search benchmark suite.
|
||||
*
|
||||
* Covers many scenarios:
|
||||
* - Single-token, multi-token, phrase-like queries
|
||||
* - Fuzzy matching enabled vs disabled
|
||||
* - Autocomplete vs full search
|
||||
* - Diacritics / unicode queries
|
||||
* - No-match queries
|
||||
* - Varying note counts (1K, 5K, 10K, 20K)
|
||||
* - Warm cache vs cold cache
|
||||
*
|
||||
* All times are in-memory (monkeypatched getContent, no real SQL).
|
||||
*/
|
||||
import { describe, it, expect, afterEach } from "vitest";
|
||||
import searchService from "./search.js";
|
||||
import BNote from "../../../becca/entities/bnote.js";
|
||||
import BBranch from "../../../becca/entities/bbranch.js";
|
||||
import SearchContext from "../search_context.js";
|
||||
import becca from "../../../becca/becca.js";
|
||||
import { NoteBuilder, note } from "../../../test/becca_mocking.js";
|
||||
|
||||
// ── helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
function randomWord(len = 6): string {
|
||||
const chars = "abcdefghijklmnopqrstuvwxyz";
|
||||
let word = "";
|
||||
for (let i = 0; i < len; i++) {
|
||||
word += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return word;
|
||||
}
|
||||
|
||||
function generateHtmlContent(wordCount: number, includeKeywords = false, keywords?: string[]): string {
|
||||
const paragraphs: string[] = [];
|
||||
let wordsRemaining = wordCount;
|
||||
const kws = keywords ?? [];
|
||||
|
||||
while (wordsRemaining > 0) {
|
||||
const paraWords = Math.min(wordsRemaining, 20 + Math.floor(Math.random() * 40));
|
||||
const words: string[] = [];
|
||||
for (let i = 0; i < paraWords; i++) {
|
||||
words.push(randomWord(3 + Math.floor(Math.random() * 10)));
|
||||
}
|
||||
if (includeKeywords && paragraphs.length === 2) {
|
||||
for (let k = 0; k < kws.length; k++) {
|
||||
const pos = Math.min(words.length - 1, Math.floor((words.length / (kws.length + 1)) * (k + 1)));
|
||||
words[pos] = kws[k];
|
||||
}
|
||||
}
|
||||
paragraphs.push(`<p>${words.join(" ")}</p>`);
|
||||
wordsRemaining -= paraWords;
|
||||
}
|
||||
|
||||
return `<html><body>${paragraphs.join("\n")}</body></html>`;
|
||||
}
|
||||
|
||||
function timed<T>(fn: () => T): [T, number] {
|
||||
const start = performance.now();
|
||||
const result = fn();
|
||||
return [result, performance.now() - start];
|
||||
}
|
||||
|
||||
function avg(nums: number[]): number {
|
||||
return nums.reduce((a, b) => a + b, 0) / nums.length;
|
||||
}
|
||||
|
||||
function min(nums: number[]): number {
|
||||
return Math.min(...nums);
|
||||
}
|
||||
|
||||
// ── dataset builder ──────────────────────────────────────────────────
|
||||
|
||||
/** noteId → synthetic HTML body; served by the monkeypatched `getContent()` in buildDataset. */
const syntheticContent: Record<string, string> = {};
|
||||
|
||||
function buildDataset(noteCount: number, opts: {
|
||||
matchFraction?: number;
|
||||
labelsPerNote?: number;
|
||||
depth?: number;
|
||||
contentWordCount?: number;
|
||||
varyContentSize?: boolean;
|
||||
titleKeywords?: string[];
|
||||
contentKeywords?: string[];
|
||||
/** Include notes with diacritics in titles */
|
||||
includeDiacritics?: boolean;
|
||||
} = {}) {
|
||||
const {
|
||||
matchFraction = 0.1,
|
||||
labelsPerNote = 3,
|
||||
depth = 4,
|
||||
contentWordCount = 300,
|
||||
varyContentSize = true,
|
||||
titleKeywords = ["target"],
|
||||
contentKeywords = titleKeywords,
|
||||
includeDiacritics = false,
|
||||
} = opts;
|
||||
|
||||
becca.reset();
|
||||
for (const key of Object.keys(syntheticContent)) {
|
||||
delete syntheticContent[key];
|
||||
}
|
||||
|
||||
const rootNote = new NoteBuilder(new BNote({ noteId: "root", title: "root", type: "text" }));
|
||||
new BBranch({
|
||||
branchId: "none_root",
|
||||
noteId: "root",
|
||||
parentNoteId: "none",
|
||||
notePosition: 10
|
||||
});
|
||||
|
||||
const containers: NoteBuilder[] = [];
|
||||
let parent = rootNote;
|
||||
for (let d = 0; d < depth; d++) {
|
||||
const container = note(`Container_${d}_${randomWord(4)}`);
|
||||
parent.child(container);
|
||||
containers.push(container);
|
||||
parent = container;
|
||||
}
|
||||
|
||||
const matchCount = Math.floor(noteCount * matchFraction);
|
||||
const diacriticTitles = [
|
||||
"résumé", "naïve", "café", "über", "ñoño", "exposé",
|
||||
"Ångström", "Üntersuchung", "São Paulo", "François"
|
||||
];
|
||||
|
||||
for (let i = 0; i < noteCount; i++) {
|
||||
const isMatch = i < matchCount;
|
||||
let title: string;
|
||||
|
||||
if (includeDiacritics && i % 20 === 0) {
|
||||
// Every 20th note gets a diacritics-heavy title
|
||||
const dTitle = diacriticTitles[i % diacriticTitles.length];
|
||||
title = isMatch
|
||||
? `${dTitle} ${titleKeywords.join(" ")} Document ${i}`
|
||||
: `${dTitle} ${randomWord(5)} Note ${i}`;
|
||||
} else {
|
||||
title = isMatch
|
||||
? `${randomWord(5)} ${titleKeywords.join(" ")} ${randomWord(5)} Document ${i}`
|
||||
: `${randomWord(5)} ${randomWord(6)} ${randomWord(4)} Note ${i}`;
|
||||
}
|
||||
|
||||
const n = note(title);
|
||||
|
||||
for (let l = 0; l < labelsPerNote; l++) {
|
||||
const labelName = isMatch && l === 0 ? "category" : `label_${randomWord(4)}`;
|
||||
const labelValue = isMatch && l === 0 ? `important ${titleKeywords[0]}` : randomWord(8);
|
||||
n.label(labelName, labelValue);
|
||||
}
|
||||
|
||||
let noteWordCount = contentWordCount;
|
||||
if (varyContentSize) {
|
||||
const r = Math.random();
|
||||
if (r < 0.2) noteWordCount = Math.floor(contentWordCount * (0.2 + Math.random() * 0.3));
|
||||
else if (r < 0.7) noteWordCount = Math.floor(contentWordCount * (0.7 + Math.random() * 0.6));
|
||||
else if (r < 0.9) noteWordCount = Math.floor(contentWordCount * (1.3 + Math.random() * 0.7));
|
||||
else noteWordCount = Math.floor(contentWordCount * (2.0 + Math.random() * 1.0));
|
||||
}
|
||||
|
||||
const includeContentKeyword = isMatch && contentKeywords.length > 0;
|
||||
syntheticContent[n.note.noteId] = generateHtmlContent(
|
||||
noteWordCount,
|
||||
includeContentKeyword,
|
||||
includeContentKeyword ? contentKeywords : undefined
|
||||
);
|
||||
|
||||
const containerIndex = i % containers.length;
|
||||
containers[containerIndex].child(n);
|
||||
}
|
||||
|
||||
// Monkeypatch getContent()
|
||||
for (const noteObj of Object.values(becca.notes)) {
|
||||
const noteId = noteObj.noteId;
|
||||
if (syntheticContent[noteId]) {
|
||||
(noteObj as any).getContent = () => syntheticContent[noteId];
|
||||
} else {
|
||||
(noteObj as any).getContent = () => "";
|
||||
}
|
||||
}
|
||||
|
||||
return { rootNote, matchCount };
|
||||
}
|
||||
|
||||
// ── benchmark runner ─────────────────────────────────────────────────
|
||||
|
||||
interface BenchmarkResult {
|
||||
query: string;
|
||||
mode: string;
|
||||
noteCount: number;
|
||||
avgMs: number;
|
||||
minMs: number;
|
||||
resultCount: number;
|
||||
}
|
||||
|
||||
function runBenchmark(
|
||||
query: string,
|
||||
mode: "autocomplete" | "fullSearch",
|
||||
fuzzyEnabled: boolean,
|
||||
iterations = 5
|
||||
): BenchmarkResult {
|
||||
const noteCount = Object.keys(becca.notes).length;
|
||||
|
||||
// Warm up
|
||||
if (mode === "autocomplete") {
|
||||
searchService.searchNotesForAutocomplete(query, true);
|
||||
} else {
|
||||
const ctx = new SearchContext({ fastSearch: false });
|
||||
ctx.enableFuzzyMatching = fuzzyEnabled;
|
||||
searchService.findResultsWithQuery(query, ctx);
|
||||
}
|
||||
|
||||
const times: number[] = [];
|
||||
let resultCount = 0;
|
||||
|
||||
for (let i = 0; i < iterations; i++) {
|
||||
if (mode === "autocomplete") {
|
||||
// For autocomplete, fuzzy is controlled by the global option
|
||||
// We'll manipulate enableFuzzyMatching after construction
|
||||
const [results, ms] = timed(() => {
|
||||
// searchNotesForAutocomplete creates its own SearchContext internally
|
||||
// so we need to test via findResultsWithQuery for fuzzy control
|
||||
const ctx = new SearchContext({
|
||||
fastSearch: true,
|
||||
includeHiddenNotes: true,
|
||||
fuzzyAttributeSearch: true,
|
||||
ignoreInternalAttributes: true,
|
||||
autocomplete: true
|
||||
});
|
||||
ctx.enableFuzzyMatching = fuzzyEnabled;
|
||||
return searchService.findResultsWithQuery(query, ctx);
|
||||
});
|
||||
times.push(ms);
|
||||
resultCount = results.length;
|
||||
} else {
|
||||
const [results, ms] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: false });
|
||||
ctx.enableFuzzyMatching = fuzzyEnabled;
|
||||
return searchService.findResultsWithQuery(query, ctx);
|
||||
});
|
||||
times.push(ms);
|
||||
resultCount = results.length;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
query,
|
||||
mode: `${mode}${fuzzyEnabled ? "+fuzzy" : ""}`,
|
||||
noteCount,
|
||||
avgMs: avg(times),
|
||||
minMs: min(times),
|
||||
resultCount
|
||||
};
|
||||
}
|
||||
|
||||
function printTable(title: string, results: BenchmarkResult[]) {
|
||||
console.log(`\n${"═".repeat(110)}`);
|
||||
console.log(` ${title}`);
|
||||
console.log(`${"═".repeat(110)}`);
|
||||
console.log(
|
||||
" " +
|
||||
"Query".padEnd(35) +
|
||||
"Mode".padEnd(22) +
|
||||
"Notes".padStart(7) +
|
||||
"Avg (ms)".padStart(12) +
|
||||
"Min (ms)".padStart(12) +
|
||||
"Results".padStart(10)
|
||||
);
|
||||
console.log(` ${"─".repeat(98)}`);
|
||||
for (const r of results) {
|
||||
console.log(
|
||||
" " +
|
||||
`"${r.query}"`.padEnd(35) +
|
||||
r.mode.padEnd(22) +
|
||||
String(r.noteCount).padStart(7) +
|
||||
r.avgMs.toFixed(1).padStart(12) +
|
||||
r.minMs.toFixed(1).padStart(12) +
|
||||
String(r.resultCount).padStart(10)
|
||||
);
|
||||
}
|
||||
console.log(`${"═".repeat(110)}\n`);
|
||||
}
|
||||
|
||||
// ── tests ────────────────────────────────────────────────────────────
|
||||
|
||||
describe("Comprehensive Search Benchmark", () => {
|
||||
|
||||
afterEach(() => {
|
||||
becca.reset();
|
||||
});
|
||||
|
||||
describe("Single-token queries", () => {
|
||||
for (const noteCount of [1000, 5000, 10000, 20000]) {
|
||||
it(`single token @ ${noteCount} notes — fuzzy on vs off, autocomplete vs full`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["meeting"],
|
||||
contentKeywords: ["meeting"],
|
||||
contentWordCount: 300,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
runBenchmark("meeting", "autocomplete", false),
|
||||
runBenchmark("meeting", "autocomplete", true),
|
||||
runBenchmark("meeting", "fullSearch", false),
|
||||
runBenchmark("meeting", "fullSearch", true),
|
||||
];
|
||||
|
||||
printTable(`Single Token "meeting" — ${noteCount} notes`, results);
|
||||
expect(results[0].resultCount).toBeGreaterThan(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Multi-token queries", () => {
|
||||
for (const noteCount of [1000, 5000, 10000, 20000]) {
|
||||
it(`multi token @ ${noteCount} notes — fuzzy on vs off`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["meeting", "notes", "january"],
|
||||
contentKeywords: ["meeting", "notes", "january"],
|
||||
contentWordCount: 400,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
// 2-token
|
||||
runBenchmark("meeting notes", "autocomplete", false),
|
||||
runBenchmark("meeting notes", "autocomplete", true),
|
||||
runBenchmark("meeting notes", "fullSearch", false),
|
||||
runBenchmark("meeting notes", "fullSearch", true),
|
||||
// 3-token
|
||||
runBenchmark("meeting notes january", "autocomplete", false),
|
||||
runBenchmark("meeting notes january", "autocomplete", true),
|
||||
runBenchmark("meeting notes january", "fullSearch", false),
|
||||
runBenchmark("meeting notes january", "fullSearch", true),
|
||||
];
|
||||
|
||||
printTable(`Multi Token — ${noteCount} notes`, results);
|
||||
expect(results[0].resultCount).toBeGreaterThan(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("No-match queries (worst case — full scan, zero results)", () => {
|
||||
for (const noteCount of [1000, 5000, 10000, 20000]) {
|
||||
it(`no-match @ ${noteCount} notes`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.1,
|
||||
titleKeywords: ["target"],
|
||||
contentKeywords: ["target"],
|
||||
contentWordCount: 300,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
runBenchmark("xyznonexistent", "autocomplete", false),
|
||||
runBenchmark("xyznonexistent", "autocomplete", true),
|
||||
runBenchmark("xyznonexistent", "fullSearch", false),
|
||||
runBenchmark("xyznonexistent", "fullSearch", true),
|
||||
runBenchmark("xyzfoo xyzbar", "autocomplete", false),
|
||||
runBenchmark("xyzfoo xyzbar", "autocomplete", true),
|
||||
runBenchmark("xyzfoo xyzbar", "fullSearch", false),
|
||||
runBenchmark("xyzfoo xyzbar", "fullSearch", true),
|
||||
];
|
||||
|
||||
printTable(`No-Match Queries — ${noteCount} notes`, results);
|
||||
// All should return 0 results
|
||||
for (const r of results) {
|
||||
expect(r.resultCount).toBe(0);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Diacritics / Unicode queries", () => {
|
||||
for (const noteCount of [1000, 5000, 10000]) {
|
||||
it(`diacritics @ ${noteCount} notes`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["résumé"],
|
||||
contentKeywords: ["résumé"],
|
||||
contentWordCount: 300,
|
||||
includeDiacritics: true,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
// Exact diacritics
|
||||
runBenchmark("résumé", "autocomplete", false),
|
||||
runBenchmark("résumé", "autocomplete", true),
|
||||
// ASCII equivalent (should still match via normalize)
|
||||
runBenchmark("resume", "autocomplete", false),
|
||||
runBenchmark("resume", "autocomplete", true),
|
||||
// Full search
|
||||
runBenchmark("résumé", "fullSearch", false),
|
||||
runBenchmark("resume", "fullSearch", false),
|
||||
];
|
||||
|
||||
printTable(`Diacritics "résumé" / "resume" — ${noteCount} notes`, results);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Partial / prefix queries (simulating typing)", () => {
|
||||
for (const noteCount of [5000, 10000, 20000]) {
|
||||
it(`typing progression @ ${noteCount} notes`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["documentation"],
|
||||
contentKeywords: ["documentation"],
|
||||
contentWordCount: 300,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
runBenchmark("d", "autocomplete", false),
|
||||
runBenchmark("do", "autocomplete", false),
|
||||
runBenchmark("doc", "autocomplete", false),
|
||||
runBenchmark("docu", "autocomplete", false),
|
||||
runBenchmark("docum", "autocomplete", false),
|
||||
runBenchmark("document", "autocomplete", false),
|
||||
runBenchmark("documentation", "autocomplete", false),
|
||||
// Same with fuzzy
|
||||
runBenchmark("d", "autocomplete", true),
|
||||
runBenchmark("doc", "autocomplete", true),
|
||||
runBenchmark("document", "autocomplete", true),
|
||||
runBenchmark("documentation", "autocomplete", true),
|
||||
];
|
||||
|
||||
printTable(`Typing Progression "documentation" — ${noteCount} notes`, results);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Attribute-matching queries", () => {
|
||||
for (const noteCount of [5000, 10000]) {
|
||||
it(`attribute search @ ${noteCount} notes`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
labelsPerNote: 5,
|
||||
titleKeywords: ["important"],
|
||||
contentKeywords: ["important"],
|
||||
contentWordCount: 200,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
// "category" is a label name on matching notes
|
||||
runBenchmark("category", "autocomplete", false),
|
||||
runBenchmark("category", "autocomplete", true),
|
||||
runBenchmark("category", "fullSearch", false),
|
||||
runBenchmark("category", "fullSearch", true),
|
||||
// "important" appears in both title and label value
|
||||
runBenchmark("important", "autocomplete", false),
|
||||
runBenchmark("important", "autocomplete", true),
|
||||
];
|
||||
|
||||
printTable(`Attribute Matching — ${noteCount} notes`, results);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Long queries (4-5 tokens)", () => {
|
||||
for (const noteCount of [5000, 10000]) {
|
||||
it(`long query @ ${noteCount} notes`, () => {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.10,
|
||||
titleKeywords: ["quarterly", "budget", "review", "report"],
|
||||
contentKeywords: ["quarterly", "budget", "review", "report"],
|
||||
contentWordCount: 500,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
runBenchmark("quarterly", "autocomplete", false),
|
||||
runBenchmark("quarterly budget", "autocomplete", false),
|
||||
runBenchmark("quarterly budget review", "autocomplete", false),
|
||||
runBenchmark("quarterly budget review report", "autocomplete", false),
|
||||
// Same with fuzzy
|
||||
runBenchmark("quarterly budget review report", "autocomplete", true),
|
||||
// Full search
|
||||
runBenchmark("quarterly budget review report", "fullSearch", false),
|
||||
runBenchmark("quarterly budget review report", "fullSearch", true),
|
||||
];
|
||||
|
||||
printTable(`Long Queries (4 tokens) — ${noteCount} notes`, results);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe("Mixed scenario — realistic user session", () => {
|
||||
it("simulates a user session with varied queries @ 10K notes", () => {
|
||||
buildDataset(10000, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["project", "planning"],
|
||||
contentKeywords: ["project", "planning", "timeline", "budget"],
|
||||
contentWordCount: 400,
|
||||
varyContentSize: true,
|
||||
includeDiacritics: true,
|
||||
depth: 6,
|
||||
});
|
||||
|
||||
const results: BenchmarkResult[] = [
|
||||
// Quick autocomplete lookups (user typing in search bar)
|
||||
runBenchmark("pro", "autocomplete", false),
|
||||
runBenchmark("project", "autocomplete", false),
|
||||
runBenchmark("project plan", "autocomplete", false),
|
||||
|
||||
// Full search (user hits Enter)
|
||||
runBenchmark("project", "fullSearch", false),
|
||||
runBenchmark("project planning", "fullSearch", false),
|
||||
runBenchmark("project planning", "fullSearch", true),
|
||||
|
||||
// Typo / near-miss with fuzzy
|
||||
runBenchmark("projct", "autocomplete", false),
|
||||
runBenchmark("projct", "autocomplete", true),
|
||||
runBenchmark("projct planing", "fullSearch", false),
|
||||
runBenchmark("projct planing", "fullSearch", true),
|
||||
|
||||
// No results
|
||||
runBenchmark("xyznonexistent", "autocomplete", false),
|
||||
runBenchmark("xyznonexistent foo", "fullSearch", true),
|
||||
|
||||
// Short common substring
|
||||
runBenchmark("note", "autocomplete", false),
|
||||
runBenchmark("document", "autocomplete", false),
|
||||
];
|
||||
|
||||
printTable("Realistic User Session — 10K notes", results);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Cache warmth impact", () => {
|
||||
it("cold vs warm flat text index @ 10K notes", () => {
|
||||
buildDataset(10000, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["target"],
|
||||
contentKeywords: ["target"],
|
||||
contentWordCount: 300,
|
||||
});
|
||||
|
||||
console.log(`\n${"═".repeat(80)}`);
|
||||
console.log(" Cold vs Warm Cache — 10K notes");
|
||||
console.log(`${"═".repeat(80)}`);
|
||||
|
||||
// Cold: first search after dataset build (flat text index not yet built)
|
||||
becca.flatTextIndex = null;
|
||||
becca.dirtyFlatTextNoteIds.clear();
|
||||
const [coldResults, coldMs] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true, autocomplete: true });
|
||||
ctx.enableFuzzyMatching = false;
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
console.log(` Cold (index build + search): ${coldMs.toFixed(1)}ms (${coldResults.length} results)`);
|
||||
|
||||
// Warm: subsequent searches reuse the index
|
||||
const warmTimes: number[] = [];
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const [, ms] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true, autocomplete: true });
|
||||
ctx.enableFuzzyMatching = false;
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
warmTimes.push(ms);
|
||||
}
|
||||
console.log(` Warm (reuse index, 5 runs): avg ${avg(warmTimes).toFixed(1)}ms min ${min(warmTimes).toFixed(1)}ms`);
|
||||
|
||||
// Incremental: dirty a few notes and search again
|
||||
const noteIds = Object.keys(becca.notes).slice(0, 50);
|
||||
for (const nid of noteIds) {
|
||||
becca.dirtyNoteFlatText(nid);
|
||||
}
|
||||
const [, incrMs] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true, autocomplete: true });
|
||||
ctx.enableFuzzyMatching = false;
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
console.log(` Incremental (50 dirty notes): ${incrMs.toFixed(1)}ms`);
|
||||
|
||||
// Full rebuild
|
||||
becca.flatTextIndex = null;
|
||||
const [, rebuildMs] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true, autocomplete: true });
|
||||
ctx.enableFuzzyMatching = false;
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
console.log(` Full rebuild (index = null): ${rebuildMs.toFixed(1)}ms`);
|
||||
|
||||
console.log(`${"═".repeat(80)}\n`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Fuzzy matching effectiveness comparison", () => {
|
||||
it("exact vs fuzzy result quality @ 10K notes", () => {
|
||||
buildDataset(10000, {
|
||||
matchFraction: 0.10,
|
||||
titleKeywords: ["performance"],
|
||||
contentKeywords: ["performance", "optimization"],
|
||||
contentWordCount: 300,
|
||||
});
|
||||
|
||||
console.log(`\n${"═".repeat(90)}`);
|
||||
console.log(" Fuzzy Matching Effectiveness — 10K notes");
|
||||
console.log(`${"═".repeat(90)}`);
|
||||
console.log(
|
||||
" " +
|
||||
"Query".padEnd(30) +
|
||||
"Fuzzy".padEnd(8) +
|
||||
"Time (ms)".padStart(12) +
|
||||
"Results".padStart(10) +
|
||||
" Notes"
|
||||
);
|
||||
console.log(` ${"─".repeat(70)}`);
|
||||
|
||||
const queries = [
|
||||
"performance", // exact match
|
||||
"performanc", // truncated
|
||||
"preformance", // typo
|
||||
"performence", // common misspelling
|
||||
"optimization", // exact match
|
||||
"optimzation", // typo
|
||||
"perf optim", // abbreviated multi
|
||||
];
|
||||
|
||||
for (const query of queries) {
|
||||
for (const fuzzy of [false, true]) {
|
||||
const times: number[] = [];
|
||||
let resultCount = 0;
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [results, ms] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true });
|
||||
ctx.enableFuzzyMatching = fuzzy;
|
||||
return searchService.findResultsWithQuery(query, ctx);
|
||||
});
|
||||
times.push(ms);
|
||||
resultCount = results.length;
|
||||
}
|
||||
console.log(
|
||||
" " +
|
||||
`"${query}"`.padEnd(30) +
|
||||
(fuzzy ? "ON" : "OFF").padEnd(8) +
|
||||
avg(times).toFixed(1).padStart(12) +
|
||||
String(resultCount).padStart(10)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`${"═".repeat(90)}\n`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Scale comparison summary", () => {
|
||||
it("summary table across all note counts", () => {
|
||||
const summaryResults: BenchmarkResult[] = [];
|
||||
|
||||
for (const noteCount of [1000, 5000, 10000, 20000]) {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
titleKeywords: ["meeting", "notes"],
|
||||
contentKeywords: ["meeting", "notes"],
|
||||
contentWordCount: 400,
|
||||
varyContentSize: true,
|
||||
depth: 5,
|
||||
});
|
||||
|
||||
// Core scenarios
|
||||
summaryResults.push(runBenchmark("meeting", "autocomplete", false));
|
||||
summaryResults.push(runBenchmark("meeting", "autocomplete", true));
|
||||
summaryResults.push(runBenchmark("meeting notes", "autocomplete", false));
|
||||
summaryResults.push(runBenchmark("meeting notes", "autocomplete", true));
|
||||
summaryResults.push(runBenchmark("meeting", "fullSearch", false));
|
||||
summaryResults.push(runBenchmark("meeting", "fullSearch", true));
|
||||
summaryResults.push(runBenchmark("meeting notes", "fullSearch", false));
|
||||
summaryResults.push(runBenchmark("meeting notes", "fullSearch", true));
|
||||
summaryResults.push(runBenchmark("xyznonexistent", "autocomplete", false));
|
||||
summaryResults.push(runBenchmark("xyznonexistent", "fullSearch", true));
|
||||
}
|
||||
|
||||
printTable("Scale Comparison Summary", summaryResults);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,665 @@
|
||||
/**
|
||||
* Search performance profiling tests.
|
||||
*
|
||||
* These tests measure where time is spent in the search pipeline.
|
||||
* We monkeypatch note.getContent() to return synthetic HTML content
|
||||
* since unit tests don't have a real SQLite database.
|
||||
*
|
||||
* KNOWN GAPS vs production:
|
||||
* - note.getContent() is instant (monkeypatched) vs ~2ms SQL fetch
|
||||
* - NoteContentFulltextExp.execute() is skipped (no sql.iterateRows)
|
||||
* because fastSearch=true uses only NoteFlatTextExp
|
||||
* - These tests focus on the in-memory/CPU-bound parts of the pipeline
|
||||
*/
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
import searchService from "./search.js";
|
||||
import BNote from "../../../becca/entities/bnote.js";
|
||||
import BBranch from "../../../becca/entities/bbranch.js";
|
||||
import SearchContext from "../search_context.js";
|
||||
import becca from "../../../becca/becca.js";
|
||||
import beccaService from "../../../becca/becca_service.js";
|
||||
import { NoteBuilder, note, id } from "../../../test/becca_mocking.js";
|
||||
import SearchResult from "../search_result.js";
|
||||
import { normalizeSearchText } from "../utils/text_utils.js";
|
||||
|
||||
// ── helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
function randomWord(len = 6): string {
|
||||
const chars = "abcdefghijklmnopqrstuvwxyz";
|
||||
let word = "";
|
||||
for (let i = 0; i < len; i++) {
|
||||
word += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return word;
|
||||
}
|
||||
|
||||
function generateHtmlContent(wordCount: number, includeKeywords = false, keywords?: string[]): string {
|
||||
const paragraphs: string[] = [];
|
||||
let wordsRemaining = wordCount;
|
||||
const kws = keywords ?? ["target"];
|
||||
|
||||
while (wordsRemaining > 0) {
|
||||
const paraWords = Math.min(wordsRemaining, 20 + Math.floor(Math.random() * 40));
|
||||
const words: string[] = [];
|
||||
for (let i = 0; i < paraWords; i++) {
|
||||
words.push(randomWord(3 + Math.floor(Math.random() * 10)));
|
||||
}
|
||||
if (includeKeywords && paragraphs.length === 2) {
|
||||
// Inject all keywords into the paragraph at spaced positions
|
||||
for (let k = 0; k < kws.length; k++) {
|
||||
const pos = Math.min(words.length - 1, Math.floor((words.length / (kws.length + 1)) * (k + 1)));
|
||||
words[pos] = kws[k];
|
||||
}
|
||||
}
|
||||
paragraphs.push(`<p>${words.join(" ")}</p>`);
|
||||
wordsRemaining -= paraWords;
|
||||
}
|
||||
|
||||
return `<html><body>${paragraphs.join("\n")}</body></html>`;
|
||||
}
|
||||
|
||||
function timed<T>(fn: () => T): [T, number] {
|
||||
const start = performance.now();
|
||||
const result = fn();
|
||||
return [result, performance.now() - start];
|
||||
}
|
||||
|
||||
interface TimingEntry { label: string; ms: number; }
|
||||
|
||||
function reportTimings(title: string, timings: TimingEntry[]) {
|
||||
const total = timings.reduce((s, t) => s + t.ms, 0);
|
||||
console.log(`\n=== ${title} (total: ${total.toFixed(1)}ms) ===`);
|
||||
for (const { label, ms } of timings) {
|
||||
const pct = total > 0 ? ((ms / total) * 100).toFixed(0) : "0";
|
||||
const bar = "#".repeat(Math.max(1, Math.round(ms / total * 40)));
|
||||
console.log(` ${label.padEnd(55)} ${ms.toFixed(1).padStart(8)}ms ${pct.padStart(3)}% ${bar}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── dataset builder ──────────────────────────────────────────────────
|
||||
|
||||
const syntheticContent: Record<string, string> = {};
|
||||
|
||||
/**
 * Rebuilds the in-memory becca cache with a synthetic note tree for benchmarking.
 *
 * Resets becca and the module-level syntheticContent map, creates a root note,
 * a chain of `depth` nested container notes, and `noteCount` leaf notes
 * distributed round-robin across the containers. The first
 * `noteCount * matchFraction` notes are "matching": their titles contain
 * `titleKeywords`, their first label is `category` with value
 * `important <first title keyword>`, and their generated HTML content has
 * `contentKeywords` injected. Finally every note's getContent() is
 * monkeypatched to return the synthetic HTML, since these unit tests have no
 * real SQLite database behind them.
 *
 * @returns the root NoteBuilder and the number of matching notes
 */
function buildDataset(noteCount: number, opts: {
    /** Fraction of notes that should match the benchmark keywords (default 0.1). */
    matchFraction?: number;
    /** Number of labels attached to every note (default 3). */
    labelsPerNote?: number;
    /** Depth of the nested container chain the notes hang off (default 3). */
    depth?: number;
    /** Words of generated HTML content per note (default 200). */
    contentWordCount?: number;
    /** When set, contentWordCount is treated as a median and actual sizes vary from 0.2x to 3x */
    varyContentSize?: boolean;
    /** Keywords to inject into matching notes' titles (default: ["target"]) */
    titleKeywords?: string[];
    /** Keywords to inject into matching notes' content (default: same as titleKeywords) */
    contentKeywords?: string[];
} = {}) {
    const {
        matchFraction = 0.1,
        labelsPerNote = 3,
        depth = 3,
        contentWordCount = 200,
        varyContentSize = false,
        titleKeywords = ["target"],
        contentKeywords = titleKeywords,
    } = opts;

    // Start from a clean slate: wipe becca and all previously generated content.
    becca.reset();
    for (const key of Object.keys(syntheticContent)) {
        delete syntheticContent[key];
    }

    const rootNote = new NoteBuilder(new BNote({ noteId: "root", title: "root", type: "text" }));
    // NOTE(review): the BBranch instance is not kept — the constructor is
    // invoked purely for its registration side effect (attaching root under
    // the virtual "none" parent).
    new BBranch({
        branchId: "none_root",
        noteId: "root",
        parentNoteId: "none",
        notePosition: 10
    });

    // Build a chain of nested containers so note paths have the requested depth.
    const containers: NoteBuilder[] = [];
    let parent = rootNote;
    for (let d = 0; d < depth; d++) {
        const container = note(`Container_${d}_${randomWord(4)}`);
        parent.child(container);
        containers.push(container);
        parent = container;
    }

    const matchCount = Math.floor(noteCount * matchFraction);

    for (let i = 0; i < noteCount; i++) {
        // The first matchCount notes are the designated "matching" notes.
        const isMatch = i < matchCount;
        const title = isMatch
            ? `${randomWord(5)} ${titleKeywords.join(" ")} ${randomWord(5)} Document ${i}`
            : `${randomWord(5)} ${randomWord(6)} ${randomWord(4)} Note ${i}`;

        const n = note(title);

        // First label of a matching note is a fixed "category" label carrying
        // the first title keyword; all other labels are random noise.
        for (let l = 0; l < labelsPerNote; l++) {
            const labelName = isMatch && l === 0 ? "category" : `label_${randomWord(4)}`;
            const labelValue = isMatch && l === 0 ? `important ${titleKeywords[0]}` : randomWord(8);
            n.label(labelName, labelValue);
        }

        // Vary content size: 0.2x to 3x the median, producing a realistic
        // mix of short stubs, medium notes, and long documents.
        let noteWordCount = contentWordCount;
        if (varyContentSize) {
            const r = Math.random();
            if (r < 0.2) {
                noteWordCount = Math.floor(contentWordCount * (0.2 + Math.random() * 0.3)); // 20-50% (short stubs)
            } else if (r < 0.7) {
                noteWordCount = Math.floor(contentWordCount * (0.7 + Math.random() * 0.6)); // 70-130% (medium)
            } else if (r < 0.9) {
                noteWordCount = Math.floor(contentWordCount * (1.3 + Math.random() * 0.7)); // 130-200% (long)
            } else {
                noteWordCount = Math.floor(contentWordCount * (2.0 + Math.random() * 1.0)); // 200-300% (very long)
            }
        }

        const includeContentKeyword = isMatch && contentKeywords.length > 0;
        syntheticContent[n.note.noteId] = generateHtmlContent(
            noteWordCount,
            includeContentKeyword,
            includeContentKeyword ? contentKeywords : undefined
        );

        // Distribute leaves round-robin across containers at every depth level.
        const containerIndex = i % containers.length;
        containers[containerIndex].child(n);
    }

    // Monkeypatch getContent() — unit tests have no SQLite, so any pipeline
    // stage that reads note content (e.g. snippet extraction) gets the
    // synthetic HTML instead of hitting a database.
    for (const noteObj of Object.values(becca.notes)) {
        const noteId = noteObj.noteId;
        if (syntheticContent[noteId]) {
            (noteObj as any).getContent = () => syntheticContent[noteId];
        } else {
            (noteObj as any).getContent = () => "";
        }
    }

    return { rootNote, matchCount };
}
|
||||
|
||||
// ── profiling tests ──────────────────────────────────────────────────
|
||||
|
||||
describe("Search Profiling", () => {
|
||||
|
||||
afterEach(() => {
|
||||
becca.reset();
|
||||
});
|
||||
|
||||
/**
|
||||
* Break down the autocomplete pipeline into every individual stage,
|
||||
* including previously unmeasured operations like getBestNotePath,
|
||||
* SearchResult construction, and getNoteTitleForPath.
|
||||
*/
|
||||
describe("Granular autocomplete pipeline", () => {
|
||||
|
||||
for (const noteCount of [500, 2000, 5000, 10000]) {
|
||||
it(`granular breakdown with ${noteCount} notes`, () => {
|
||||
const timings: TimingEntry[] = [];
|
||||
|
||||
const [, buildMs] = timed(() => buildDataset(noteCount, {
|
||||
matchFraction: 0.2,
|
||||
contentWordCount: 300,
|
||||
depth: 5
|
||||
}));
|
||||
timings.push({ label: `Dataset build (${noteCount} notes)`, ms: buildMs });
|
||||
|
||||
// === NoteFlatTextExp: getCandidateNotes ===
|
||||
// This calls getFlatText() + normalizeSearchText() for EVERY note
|
||||
const allNotes = Object.values(becca.notes);
|
||||
for (const n of allNotes) n.invalidateThisCache();
|
||||
|
||||
const [, candidateMs] = timed(() => {
|
||||
const token = normalizeSearchText("target");
|
||||
let count = 0;
|
||||
for (const n of allNotes) {
|
||||
const flatText = normalizeSearchText(n.getFlatText());
|
||||
if (flatText.includes(token)) count++;
|
||||
}
|
||||
return count;
|
||||
});
|
||||
timings.push({ label: `getCandidateNotes simulation (cold caches)`, ms: candidateMs });
|
||||
|
||||
// Warm cache version
|
||||
const [candidateCount, candidateWarmMs] = timed(() => {
|
||||
const token = normalizeSearchText("target");
|
||||
let count = 0;
|
||||
for (const n of allNotes) {
|
||||
const flatText = normalizeSearchText(n.getFlatText());
|
||||
if (flatText.includes(token)) count++;
|
||||
}
|
||||
return count;
|
||||
});
|
||||
timings.push({ label: `getCandidateNotes simulation (warm caches)`, ms: candidateWarmMs });
|
||||
|
||||
// === getBestNotePath for each candidate ===
|
||||
const candidates = allNotes.filter(n => {
|
||||
const flatText = normalizeSearchText(n.getFlatText());
|
||||
return flatText.includes("target");
|
||||
});
|
||||
|
||||
const [, pathMs] = timed(() => {
|
||||
for (const n of candidates) {
|
||||
n.getBestNotePath();
|
||||
}
|
||||
});
|
||||
timings.push({ label: `getBestNotePath (${candidates.length} notes)`, ms: pathMs });
|
||||
|
||||
// === SearchResult construction (includes getNoteTitleForPath) ===
|
||||
const paths = candidates.map(n => n.getBestNotePath()).filter(Boolean);
|
||||
|
||||
const [searchResults, srMs] = timed(() => {
|
||||
return paths.map(p => new SearchResult(p));
|
||||
});
|
||||
timings.push({ label: `SearchResult construction (${paths.length} results)`, ms: srMs });
|
||||
|
||||
// === computeScore ===
|
||||
const [, scoreMs] = timed(() => {
|
||||
for (const r of searchResults) {
|
||||
r.computeScore("target", ["target"], true);
|
||||
}
|
||||
});
|
||||
timings.push({ label: `computeScore with fuzzy (${searchResults.length} results)`, ms: scoreMs });
|
||||
|
||||
const [, scoreNoFuzzyMs] = timed(() => {
|
||||
for (const r of searchResults) {
|
||||
r.computeScore("target", ["target"], false);
|
||||
}
|
||||
});
|
||||
timings.push({ label: `computeScore no-fuzzy`, ms: scoreNoFuzzyMs });
|
||||
|
||||
// === Sorting ===
|
||||
const [, sortMs] = timed(() => {
|
||||
searchResults.sort((a, b) => {
|
||||
if (a.score !== b.score) return b.score - a.score;
|
||||
if (a.notePathArray.length === b.notePathArray.length) {
|
||||
return a.notePathTitle < b.notePathTitle ? -1 : 1;
|
||||
}
|
||||
return a.notePathArray.length - b.notePathArray.length;
|
||||
});
|
||||
});
|
||||
timings.push({ label: `Sort results`, ms: sortMs });
|
||||
|
||||
// === Trim + content snippet extraction ===
|
||||
const trimmed = searchResults.slice(0, 200);
|
||||
|
||||
const [, snippetMs] = timed(() => {
|
||||
for (const r of trimmed) {
|
||||
r.contentSnippet = searchService.extractContentSnippet(
|
||||
r.noteId, ["target"]
|
||||
);
|
||||
}
|
||||
});
|
||||
timings.push({ label: `Content snippet extraction (${trimmed.length} results)`, ms: snippetMs });
|
||||
|
||||
const [, attrMs] = timed(() => {
|
||||
for (const r of trimmed) {
|
||||
r.attributeSnippet = searchService.extractAttributeSnippet(
|
||||
r.noteId, ["target"]
|
||||
);
|
||||
}
|
||||
});
|
||||
timings.push({ label: `Attribute snippet extraction`, ms: attrMs });
|
||||
|
||||
// === Highlighting ===
|
||||
const [, hlMs] = timed(() => {
|
||||
searchService.highlightSearchResults(trimmed, ["target"]);
|
||||
});
|
||||
timings.push({ label: `Highlighting`, ms: hlMs });
|
||||
|
||||
// === Final mapping (getNoteTitleAndIcon) ===
|
||||
const [, mapMs] = timed(() => {
|
||||
for (const r of trimmed) {
|
||||
beccaService.getNoteTitleAndIcon(r.noteId);
|
||||
}
|
||||
});
|
||||
timings.push({ label: `getNoteTitleAndIcon (${trimmed.length} results)`, ms: mapMs });
|
||||
|
||||
// === Full autocomplete for comparison ===
|
||||
const [autoResults, autoMs] = timed(() => {
|
||||
return searchService.searchNotesForAutocomplete("target", true);
|
||||
});
|
||||
timings.push({ label: `Full autocomplete call (end-to-end)`, ms: autoMs });
|
||||
|
||||
reportTimings(`Granular Autocomplete — ${noteCount} notes`, timings);
|
||||
expect(autoResults.length).toBeGreaterThan(0);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * Test the specific cost of normalizeSearchText which is called
 * pervasively throughout the pipeline (once per note flat-text plus once
 * per token), so its per-call cost multiplies with dataset size.
 */
describe("normalizeSearchText cost", () => {

    it("profile normalizeSearchText at scale", () => {
        // Dataset only matters for the "flat text" sample below.
        buildDataset(5000, { matchFraction: 0.2, contentWordCount: 100 });

        // Generate various text lengths to profile
        const shortTexts = Array.from({ length: 5000 }, () => randomWord(10));
        const mediumTexts = Array.from({ length: 5000 }, () =>
            Array.from({ length: 20 }, () => randomWord(6)).join(" ")
        );
        // Realistic inputs: actual note flat texts (title + labels etc.).
        const longTexts = Object.values(becca.notes).map(n => n.getFlatText());

        console.log("\n=== normalizeSearchText cost ===");

        // Short inputs (~token-sized): measures fixed per-call overhead.
        const [, shortMs] = timed(() => {
            for (const t of shortTexts) normalizeSearchText(t);
        });
        console.log(` 5000 short texts (10 chars): ${shortMs.toFixed(1)}ms (${(shortMs/5000*1000).toFixed(1)}µs/call)`);

        // Medium inputs (~120 chars): title-sized strings.
        const [, medMs] = timed(() => {
            for (const t of mediumTexts) normalizeSearchText(t);
        });
        console.log(` 5000 medium texts (120 chars): ${medMs.toFixed(1)}ms (${(medMs/5000*1000).toFixed(1)}µs/call)`);

        // Long inputs: whatever getFlatText() produced for each note.
        const [, longMs] = timed(() => {
            for (const t of longTexts) normalizeSearchText(t);
        });
        console.log(` ${longTexts.length} flat texts (varying): ${longMs.toFixed(1)}ms (${(longMs/longTexts.length*1000).toFixed(1)}µs/call)`);
    });
});
|
||||
|
||||
/**
 * Test the searchPathTowardsRoot recursive walk which runs
 * for every candidate note in NoteFlatTextExp. Deeper hierarchies mean a
 * longer ancestor walk per candidate, so this profiles end-to-end search
 * time as a function of tree depth with everything else held constant.
 */
describe("searchPathTowardsRoot cost", () => {

    it("profile recursive walk with varying hierarchy depth", () => {
        console.log("\n=== Search path walk vs hierarchy depth ===");

        for (const depth of [3, 5, 8, 12]) {
            // Same note count / match rate each time; only the depth varies.
            buildDataset(2000, {
                matchFraction: 0.15,
                depth,
                contentWordCount: 50
            });

            const [results, ms] = timed(() => {
                const ctx = new SearchContext({ fastSearch: true });
                return searchService.findResultsWithQuery("target", ctx);
            });
            console.log(` depth=${depth}: ${ms.toFixed(1)}ms (${results.length} results)`);
        }
    });
});
|
||||
|
||||
/**
|
||||
* Content snippet extraction scaling — the operation that calls
|
||||
* note.getContent() for each result.
|
||||
*/
|
||||
describe("Content snippet extraction", () => {
|
||||
|
||||
it("profile snippet extraction with varying content sizes", () => {
|
||||
console.log("\n=== Content snippet extraction vs content size ===");
|
||||
|
||||
for (const wordCount of [50, 200, 500, 1000, 2000, 5000]) {
|
||||
buildDataset(500, {
|
||||
matchFraction: 0.5,
|
||||
contentWordCount: wordCount
|
||||
});
|
||||
|
||||
const ctx = new SearchContext({ fastSearch: true });
|
||||
const results = searchService.findResultsWithQuery("target", ctx);
|
||||
const trimmed = results.slice(0, 200);
|
||||
|
||||
const [, ms] = timed(() => {
|
||||
for (const r of trimmed) {
|
||||
r.contentSnippet = searchService.extractContentSnippet(
|
||||
r.noteId, ["target"]
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
const avgContentLen = Object.values(syntheticContent)
|
||||
.slice(0, 100)
|
||||
.reduce((s, c) => s + c.length, 0) / 100;
|
||||
|
||||
console.log(` ${String(wordCount).padStart(5)} words/note (avg ${Math.round(avgContentLen)} chars) × ${trimmed.length} results: ${ms.toFixed(1)}ms (${(ms / trimmed.length).toFixed(3)}ms/note)`);
|
||||
}
|
||||
});
|
||||
|
||||
it("profile snippet extraction with varying result counts", () => {
|
||||
console.log("\n=== Content snippet extraction vs result count ===");
|
||||
|
||||
buildDataset(2000, {
|
||||
matchFraction: 0.5,
|
||||
contentWordCount: 500
|
||||
});
|
||||
|
||||
const ctx = new SearchContext({ fastSearch: true });
|
||||
const allResults = searchService.findResultsWithQuery("target", ctx);
|
||||
|
||||
for (const count of [5, 10, 20, 50, 100, 200]) {
|
||||
const subset = allResults.slice(0, count);
|
||||
|
||||
const [, ms] = timed(() => {
|
||||
for (const r of subset) {
|
||||
r.contentSnippet = searchService.extractContentSnippet(
|
||||
r.noteId, ["target"]
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
console.log(` ${String(count).padStart(3)} results: ${ms.toFixed(1)}ms (${(ms / count).toFixed(3)}ms/note)`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
/**
 * Two-phase exact/fuzzy search cost: compares an exact-only search
 * (enableFuzzyMatching = false) against the default progressive
 * exact-then-fuzzy search, and reports the difference as the overhead of
 * the fuzzy fallback phase. A tiny matchFraction keeps exact results
 * scarce so the fuzzy phase is actually exercised.
 */
describe("Two-phase search cost", () => {

    for (const noteCount of [1000, 5000, 10000]) {
        it(`exact vs progressive with ${noteCount} notes`, () => {
            const timings: TimingEntry[] = [];

            buildDataset(noteCount, { matchFraction: 0.005, contentWordCount: 50 });

            // Phase 1 only: exact matching, fuzzy disabled.
            const [exactR, exactMs] = timed(() => {
                const ctx = new SearchContext({ fastSearch: true });
                ctx.enableFuzzyMatching = false;
                return searchService.findResultsWithQuery("target", ctx);
            });
            timings.push({ label: `Exact-only (${exactR.length} results)`, ms: exactMs });

            // Default context: exact phase plus fuzzy fallback.
            const [progR, progMs] = timed(() => {
                const ctx = new SearchContext({ fastSearch: true });
                return searchService.findResultsWithQuery("target", ctx);
            });
            timings.push({ label: `Progressive exact→fuzzy (${progR.length} results)`, ms: progMs });

            // Clamp at 0: timing jitter can make the progressive run faster.
            const overhead = progMs - exactMs;
            timings.push({ label: `Fuzzy phase overhead`, ms: Math.max(0, overhead) });

            reportTimings(`Two-phase — ${noteCount} notes`, timings);
        });
    }
});
|
||||
|
||||
/**
|
||||
* End-to-end scaling to give the full picture.
|
||||
*/
|
||||
/**
|
||||
* Multi-token search with varying content sizes.
|
||||
* Real users search things like "meeting notes january" — this exercises
|
||||
* the multi-token path (which doesn't use the single-token fast path)
|
||||
* with a realistic mix of note sizes.
|
||||
*/
|
||||
describe("Multi-token search with varying content sizes", () => {
|
||||
|
||||
it("single vs multi-token autocomplete at scale", () => {
|
||||
console.log("\n=== Single vs multi-token autocomplete (varying content sizes) ===");
|
||||
|
||||
for (const noteCount of [1000, 5000, 10000, 20000]) {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.15,
|
||||
contentWordCount: 400,
|
||||
varyContentSize: true,
|
||||
depth: 5,
|
||||
titleKeywords: ["meeting", "notes", "january"],
|
||||
contentKeywords: ["meeting", "notes", "january"],
|
||||
});
|
||||
|
||||
// Warm up
|
||||
searchService.searchNotesForAutocomplete("meeting", true);
|
||||
|
||||
// Single token
|
||||
const singleTimes: number[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [, ms] = timed(() => searchService.searchNotesForAutocomplete("meeting", true));
|
||||
singleTimes.push(ms);
|
||||
}
|
||||
const singleAvg = singleTimes.reduce((a, b) => a + b, 0) / singleTimes.length;
|
||||
|
||||
// Two tokens
|
||||
const twoTimes: number[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [, ms] = timed(() => searchService.searchNotesForAutocomplete("meeting notes", true));
|
||||
twoTimes.push(ms);
|
||||
}
|
||||
const twoAvg = twoTimes.reduce((a, b) => a + b, 0) / twoTimes.length;
|
||||
|
||||
// Three tokens
|
||||
const threeTimes: number[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [, ms] = timed(() => searchService.searchNotesForAutocomplete("meeting notes january", true));
|
||||
threeTimes.push(ms);
|
||||
}
|
||||
const threeAvg = threeTimes.reduce((a, b) => a + b, 0) / threeTimes.length;
|
||||
|
||||
console.log(
|
||||
` ${String(noteCount).padStart(6)} notes: ` +
|
||||
`1-token ${singleAvg.toFixed(1)}ms ` +
|
||||
`2-token ${twoAvg.toFixed(1)}ms ` +
|
||||
`3-token ${threeAvg.toFixed(1)}ms`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it("multi-token with realistic content size distribution", () => {
|
||||
console.log("\n=== Multi-token search — content size distribution ===");
|
||||
|
||||
buildDataset(5000, {
|
||||
matchFraction: 0.15,
|
||||
contentWordCount: 400,
|
||||
varyContentSize: true,
|
||||
depth: 5,
|
||||
titleKeywords: ["project", "review"],
|
||||
contentKeywords: ["project", "review"],
|
||||
});
|
||||
|
||||
// Report the actual content size distribution
|
||||
const sizes = Object.values(syntheticContent).map(c => c.length);
|
||||
sizes.sort((a, b) => a - b);
|
||||
const p10 = sizes[Math.floor(sizes.length * 0.1)];
|
||||
const p50 = sizes[Math.floor(sizes.length * 0.5)];
|
||||
const p90 = sizes[Math.floor(sizes.length * 0.9)];
|
||||
const p99 = sizes[Math.floor(sizes.length * 0.99)];
|
||||
console.log(` Content sizes: p10=${p10} p50=${p50} p90=${p90} p99=${p99} chars`);
|
||||
|
||||
// Warm up
|
||||
searchService.searchNotesForAutocomplete("project", true);
|
||||
|
||||
const queries = [
|
||||
"project",
|
||||
"project review",
|
||||
"project review document",
|
||||
`${randomWord(7)}`, // no-match single token
|
||||
`${randomWord(5)} ${randomWord(6)}`, // no-match multi token
|
||||
];
|
||||
|
||||
for (const query of queries) {
|
||||
const times: number[] = [];
|
||||
let resultCount = 0;
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [r, ms] = timed(() => searchService.searchNotesForAutocomplete(query, true));
|
||||
times.push(ms);
|
||||
resultCount = r.length;
|
||||
}
|
||||
const avg = times.reduce((a, b) => a + b, 0) / times.length;
|
||||
const label = `"${query}"`.padEnd(35);
|
||||
console.log(` ${label} ${avg.toFixed(1)}ms (${resultCount} results)`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("End-to-end scaling", () => {
|
||||
|
||||
it("autocomplete at different scales", () => {
|
||||
console.log("\n=== End-to-end autocomplete scaling ===");
|
||||
console.log(" (fastSearch=true, monkeypatched getContent, no real SQL)");
|
||||
|
||||
for (const noteCount of [100, 500, 1000, 2000, 5000, 10000, 20000]) {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.2,
|
||||
contentWordCount: 300,
|
||||
depth: 4
|
||||
});
|
||||
|
||||
// Warm up
|
||||
searchService.searchNotesForAutocomplete("target", true);
|
||||
|
||||
const times: number[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const [, ms] = timed(() => searchService.searchNotesForAutocomplete("target", true));
|
||||
times.push(ms);
|
||||
}
|
||||
|
||||
const avg = times.reduce((a, b) => a + b, 0) / times.length;
|
||||
const min = Math.min(...times);
|
||||
|
||||
console.log(
|
||||
` ${String(noteCount).padStart(6)} notes: avg ${avg.toFixed(1)}ms ` +
|
||||
`min ${min.toFixed(1)}ms`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it("compare fast vs non-fast search", () => {
|
||||
console.log("\n=== Fast vs non-fast search (no real SQL for content) ===");
|
||||
|
||||
for (const noteCount of [500, 2000, 5000]) {
|
||||
buildDataset(noteCount, {
|
||||
matchFraction: 0.2,
|
||||
contentWordCount: 200,
|
||||
depth: 4
|
||||
});
|
||||
|
||||
const [, fastMs] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: true });
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
|
||||
// Non-fast search tries NoteContentFulltextExp which uses sql.iterateRows
|
||||
// This will likely fail/return empty since there's no real DB, but we
|
||||
// can still measure the overhead of attempting it
|
||||
let nonFastMs: number;
|
||||
let nonFastCount: number;
|
||||
try {
|
||||
const [results, ms] = timed(() => {
|
||||
const ctx = new SearchContext({ fastSearch: false });
|
||||
return searchService.findResultsWithQuery("target", ctx);
|
||||
});
|
||||
nonFastMs = ms;
|
||||
nonFastCount = results.length;
|
||||
} catch {
|
||||
nonFastMs = -1;
|
||||
nonFastCount = -1;
|
||||
}
|
||||
|
||||
console.log(
|
||||
` ${String(noteCount).padStart(5)} notes: fast=${fastMs.toFixed(1)}ms ` +
|
||||
`non-fast=${nonFastMs >= 0 ? nonFastMs.toFixed(1) + 'ms' : 'FAILED (no real DB)'} ` +
|
||||
`(non-fast results: ${nonFastCount})`
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -21,4 +21,6 @@ export interface SearchParams {
|
||||
limit?: number | null;
|
||||
debug?: boolean;
|
||||
fuzzyAttributeSearch?: boolean;
|
||||
/** When true, skip the two-phase fuzzy fallback and use the single-token fast path. */
|
||||
autocomplete?: boolean;
|
||||
}
|
||||
|
||||
@@ -275,19 +275,17 @@ export function fuzzyMatchWordWithResult(token: string, text: string, maxDistanc
|
||||
}
|
||||
|
||||
try {
|
||||
// Normalize both strings for comparison
|
||||
// Normalize for comparison — some callers pass pre-normalized text,
|
||||
// others don't, so this function must be self-contained.
|
||||
const normalizedToken = token.toLowerCase();
|
||||
const normalizedText = text.toLowerCase();
|
||||
|
||||
|
||||
// Exact match check first (most common case)
|
||||
if (normalizedText.includes(normalizedToken)) {
|
||||
// Find the exact match in the original text to preserve case
|
||||
const exactMatch = text.match(new RegExp(escapeRegExp(token), 'i'));
|
||||
return exactMatch ? exactMatch[0] : token;
|
||||
return token;
|
||||
}
|
||||
|
||||
// For fuzzy matching, we need to check individual words in the text
|
||||
// Split the text into words and check each word against the token
|
||||
|
||||
// For fuzzy matching, split into words and check each against the token
|
||||
const words = normalizedText.split(/\s+/).filter(word => word.length > 0);
|
||||
const originalWords = text.split(/\s+/).filter(word => word.length > 0);
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import safeCompare from "safe-compare";
|
||||
|
||||
import type { NextFunction, Request, Response, Router } from "express";
|
||||
import type { Request, Response, Router } from "express";
|
||||
|
||||
import shaca from "./shaca/shaca.js";
|
||||
import shacaLoader from "./shaca/shaca_loader.js";
|
||||
@@ -10,16 +10,6 @@ import type SNote from "./shaca/entities/snote.js";
|
||||
import type SAttachment from "./shaca/entities/sattachment.js";
|
||||
import { getDefaultTemplatePath, renderNoteContent } from "./content_renderer.js";
|
||||
import utils from "../services/utils.js";
|
||||
import { isShareDbReady } from "./sql.js";
|
||||
|
||||
function assertShareDbReady(_req: Request, res: Response, next: NextFunction) {
|
||||
if (!isShareDbReady()) {
|
||||
res.status(503).send("The application is still initializing. Please try again in a moment.");
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
function addNoIndexHeader(note: SNote, res: Response) {
|
||||
if (note.isLabelTruthy("shareDisallowRobotIndexing")) {
|
||||
@@ -125,8 +115,6 @@ function render404(res: Response) {
|
||||
}
|
||||
|
||||
function register(router: Router) {
|
||||
// Guard: if the share DB is not yet initialized, return 503 for all /share routes.
|
||||
router.use("/share", assertShareDbReady);
|
||||
|
||||
function renderNote(note: SNote, req: Request, res: Response) {
|
||||
if (!note) {
|
||||
|
||||
@@ -5,14 +5,12 @@ import dataDir from "../services/data_dir.js";
|
||||
import sql_init from "../services/sql_init.js";
|
||||
|
||||
let dbConnection!: Database.Database;
|
||||
let dbConnectionReady = false;
|
||||
|
||||
sql_init.dbReady.then(() => {
|
||||
dbConnection = new Database(dataDir.DOCUMENT_PATH, {
|
||||
readonly: true,
|
||||
nativeBinding: process.env.BETTERSQLITE3_NATIVE_PATH || undefined
|
||||
});
|
||||
dbConnectionReady = true;
|
||||
|
||||
[`exit`, `SIGINT`, `SIGUSR1`, `SIGUSR2`, `SIGTERM`].forEach((eventType) => {
|
||||
process.on(eventType, () => {
|
||||
@@ -25,31 +23,18 @@ sql_init.dbReady.then(() => {
|
||||
});
|
||||
});
|
||||
|
||||
function assertDbReady(): void {
|
||||
if (!dbConnectionReady) {
|
||||
throw new Error("Share database connection is not yet ready. The application may still be initializing.");
|
||||
}
|
||||
}
|
||||
|
||||
function getRawRows<T>(query: string, params = []): T[] {
|
||||
assertDbReady();
|
||||
return dbConnection.prepare(query).raw().all(params) as T[];
|
||||
}
|
||||
|
||||
function getRow<T>(query: string, params: string[] = []): T {
|
||||
assertDbReady();
|
||||
return dbConnection.prepare(query).get(params) as T;
|
||||
}
|
||||
|
||||
function getColumn<T>(query: string, params: string[] = []): T[] {
|
||||
assertDbReady();
|
||||
return dbConnection.prepare(query).pluck().all(params) as T[];
|
||||
}
|
||||
|
||||
export function isShareDbReady(): boolean {
|
||||
return dbConnectionReady;
|
||||
}
|
||||
|
||||
export default {
|
||||
getRawRows,
|
||||
getRow,
|
||||
|
||||
@@ -13,10 +13,10 @@
|
||||
"postinstall": "wxt prepare"
|
||||
},
|
||||
"keywords": [],
|
||||
"packageManager": "pnpm@10.32.1",
|
||||
"packageManager": "pnpm@10.32.0",
|
||||
"devDependencies": {
|
||||
"@wxt-dev/auto-icons": "1.1.1",
|
||||
"wxt": "0.20.20"
|
||||
"wxt": "0.20.18"
|
||||
},
|
||||
"dependencies": {
|
||||
"cash-dom": "8.1.5"
|
||||
|
||||
@@ -9,21 +9,21 @@
|
||||
"preview": "pnpm build && vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"i18next": "25.10.3",
|
||||
"i18next": "25.8.17",
|
||||
"i18next-http-backend": "3.0.2",
|
||||
"preact": "10.29.0",
|
||||
"preact-iso": "2.11.1",
|
||||
"preact-render-to-string": "6.6.6",
|
||||
"react-i18next": "16.6.0"
|
||||
"react-i18next": "16.5.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@preact/preset-vite": "2.10.5",
|
||||
"eslint": "10.1.0",
|
||||
"@preact/preset-vite": "2.10.3",
|
||||
"eslint": "10.0.3",
|
||||
"eslint-config-preact": "2.0.0",
|
||||
"typescript": "5.9.3",
|
||||
"user-agent-data-types": "0.4.2",
|
||||
"vite": "8.0.1",
|
||||
"vitest": "4.1.0"
|
||||
"vite": "7.3.1",
|
||||
"vitest": "4.0.18"
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": "preact"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user