Compare commits

..

9 Commits

Author SHA1 Message Date
Elian Doran
3101e01478 refactor(desktop-standalone): rename project 2026-03-27 09:27:38 +02:00
Elian Doran
c00a181645 Merge remote-tracking branch 'origin/standalone' into feature/standalone_neutralino 2026-03-27 09:16:58 +02:00
Elian Doran
dd35a49752 chore(standalone-desktop): add title 2026-01-17 11:33:35 +02:00
Elian Doran
b06126baf3 chore(standalone-desktop): basic exit mechanism 2026-01-17 11:32:27 +02:00
Elian Doran
9d86b43909 feat(standalone-desktop): basic dev server 2026-01-17 11:23:44 +02:00
Elian Doran
79432c117c chore(standalone-desktop): trigger Neutralino build 2026-01-17 10:01:12 +02:00
Elian Doran
77371b6658 chore(standalone-desktop): prepare resources for dist 2026-01-17 09:31:21 +02:00
Elian Doran
6ca6b0a1e4 Merge remote-tracking branch 'origin/standalone' into feature/standalone_neutralino 2026-01-17 09:19:56 +02:00
Elian Doran
3e6678ad9e chore(standalone-desktop): init with Neutralino 2026-01-17 09:07:27 +02:00
988 changed files with 26743 additions and 55643 deletions

View File

@@ -1,7 +0,0 @@
{
"permissions": {
"allow": [
"Bash(gh issue *)"
]
}
}

View File

@@ -66,20 +66,12 @@ runs:
if: ${{ inputs.os == 'linux' }}
shell: ${{ inputs.shell }}
run: |
sudo apt-get update && sudo apt-get install rpm flatpak-builder elfutils libfuse2
sudo apt-get update && sudo apt-get install rpm flatpak-builder elfutils
flatpak remote-add --user --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
FLATPAK_ARCH=$(if [[ ${{ inputs.arch }} = 'arm64' ]]; then echo 'aarch64'; else echo 'x86_64'; fi)
FLATPAK_VERSION='24.08'
flatpak install --user --no-deps --arch $FLATPAK_ARCH --assumeyes runtime/org.freedesktop.Platform/$FLATPAK_ARCH/$FLATPAK_VERSION runtime/org.freedesktop.Sdk/$FLATPAK_ARCH/$FLATPAK_VERSION org.electronjs.Electron2.BaseApp/$FLATPAK_ARCH/$FLATPAK_VERSION
- name: Install appimagetool
if: ${{ inputs.os == 'linux' }}
shell: ${{ inputs.shell }}
run: |
APPIMAGETOOL_ARCH=$(if [[ ${{ inputs.arch }} = 'arm64' ]]; then echo 'aarch64'; else echo 'x86_64'; fi)
wget -q "https://github.com/AppImage/appimagetool/releases/download/continuous/appimagetool-${APPIMAGETOOL_ARCH}.AppImage" -O /usr/local/bin/appimagetool
chmod +x /usr/local/bin/appimagetool
- name: Update build info
shell: ${{ inputs.shell }}
run: pnpm run chore:update-build-info
@@ -98,14 +90,6 @@ runs:
TARGET_ARCH: ${{ inputs.arch }}
run: pnpm run --filter desktop electron-forge:make --arch=${{ inputs.arch }} --platform=${{ inputs.forge_platform }}
- name: Build AppImage
if: ${{ inputs.os == 'linux' }}
shell: ${{ inputs.shell }}
env:
TRILIUM_ARTIFACT_NAME_HINT: TriliumNotes-${{ github.ref_name }}-${{ inputs.os }}-${{ inputs.arch }}
APPIMAGE_EXTRACT_AND_RUN: "1"
run: bash apps/desktop/scripts/build-appimage.sh ${{ inputs.arch }}
# Add DMG signing step
- name: Sign DMG
if: inputs.os == 'macos'

View File

@@ -8,7 +8,7 @@ inputs:
runs:
using: composite
steps:
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:

View File

@@ -55,7 +55,7 @@ runs:
# Post deployment URL as PR comment
- name: Comment PR with Preview URL
if: github.event_name == 'pull_request'
uses: actions/github-script@v9
uses: actions/github-script@v8
env:
COMMENT_BODY: ${{ inputs.comment_body }}
PRODUCTION_URL: ${{ inputs.production_url }}

View File

@@ -1,7 +1,5 @@
# Trilium Notes - AI Coding Agent Instructions
> **Note**: When updating this file, also update `CLAUDE.md` in the repository root to keep both AI coding assistants in sync.
## Project Overview
Trilium Notes is a hierarchical note-taking application with advanced features like synchronization, scripting, and rich text editing. Built as a TypeScript monorepo using pnpm, it implements a three-layer caching architecture (Becca/Froca/Shaca) with a widget-based UI system and supports extensive user scripting capabilities.
@@ -117,15 +115,6 @@ class MyNoteWidget extends NoteContextAwareWidget {
**Important**: Widgets use jQuery (`this.$widget`) for DOM manipulation. Don't mix React patterns here.
### Reusable Preact Components
Common UI components are available in `apps/client/src/widgets/react/` — prefer reusing these over creating custom implementations:
- `NoItems` - Empty state placeholder with icon and message (use for "no results", "too many items", error states)
- `ActionButton` - Consistent button styling with icon support
- `FormTextBox` - Text input with validation and controlled input handling
- `Slider` - Range slider with label
- `Checkbox`, `RadioButton` - Form controls
- `CollapsibleSection` - Expandable content sections
## Development Workflow
### Running & Testing
@@ -154,7 +143,7 @@ pnpm desktop:build # Build desktop application
### Test Organization
- **Server tests** (`apps/server/spec/`): Must run sequentially (shared database state)
- **Client tests** (`apps/client/src/`): Can run in parallel
- **E2E tests** (`packages/trilium-e2e/`): Shared Playwright tests, run via `pnpm --filter server e2e` or `pnpm --filter client-standalone e2e`
- **E2E tests** (`apps/server-e2e/`): Use Playwright for integration testing
- **ETAPI tests** (`apps/server/spec/etapi/`): External API contract tests
**Pattern**: When adding new API endpoints, add tests in `spec/etapi/` following existing patterns (see `search.spec.ts`).
@@ -197,14 +186,6 @@ When adding query parameters to ETAPI endpoints (`apps/server/src/etapi/`), main
**Auth note**: ETAPI uses basic auth with tokens. Internal API endpoints trust the frontend.
### Adding New LLM Tools
Tools are defined using `defineTools()` in `apps/server/src/services/llm/tools/` and automatically registered for both the LLM chat and MCP server.
1. Add the tool definition in the appropriate module (`note_tools.ts`, `attribute_tools.ts`, `hierarchy_tools.ts`) or create a new module
2. Each tool needs: `description`, `inputSchema` (Zod), `execute` function, and optionally `mutates: true` for write operations or `needsContext: true` for tools that need the current note context
3. If creating a new module, wrap tools in `defineTools({...})` and add the registry to `allToolRegistries` in `tools/index.ts`
4. Add a client-side friendly name in `apps/client/src/translations/en/translation.json` under `llm.tools.<tool_name>` — use **imperative tense** (e.g. "Search notes", "Create note", "Get attributes"), not present continuous
### Database Migrations
- Add scripts in `apps/server/src/migrations/YYMMDD_HHMM__description.sql`
- Update schema in `apps/server/src/assets/db/schema.sql`
@@ -232,12 +213,6 @@ Tools are defined using `defineTools()` in `apps/server/src/services/llm/tools/`
10. **Attribute inheritance can be complex** - When checking for labels/relations, use `note.getOwnedAttribute()` for direct attributes or `note.getAttribute()` for inherited ones. Don't assume attributes are directly on the note.
## MCP Server
- Trilium exposes an MCP (Model Context Protocol) server at `http://localhost:8080/mcp`, configured in `.mcp.json`
- The MCP server is **only available when the Trilium server is running** (`pnpm run server:start`)
- It provides tools for reading, searching, and modifying notes directly from the AI assistant
- Use it to interact with actual note data when developing or debugging note-related features
## TypeScript Configuration
- **Project references**: Monorepo uses TypeScript project references (`tsconfig.json`)
@@ -300,12 +275,6 @@ View types are configured via `#viewType` label (e.g., `#viewType=table`). Each
- Register in `packages/ckeditor5/src/plugins.ts`
- See `ckeditor5-admonition`, `ckeditor5-footnotes`, `ckeditor5-math`, `ckeditor5-mermaid` for examples
### Updating PDF.js
1. Update `pdfjs-dist` version in `packages/pdfjs-viewer/package.json`
2. Run `npx tsx scripts/update-viewer.ts` from that directory
3. Run `pnpm build` to verify success
4. Commit all changes including updated viewer files
### Database Migrations
- Add migration scripts in `apps/server/src/migrations/YYMMDD_HHMM__description.sql`
- Update schema in `apps/server/src/assets/db/schema.sql`
@@ -330,29 +299,9 @@ Trilium provides powerful user scripting capabilities:
- Translation files in `apps/client/src/translations/`
- Use translation system via `t()` function
- Automatic pluralization: Add `_other` suffix to translation keys (e.g., `item` and `item_other` for singular/plural)
- When a translated string contains **interpolated components** (e.g. links, note references) whose order may vary across languages, use `<Trans>` from `react-i18next` instead of `t()`. This lets translators reorder components freely (e.g. `"<Note/> in <Parent/>"` vs `"in <Parent/>, <Note/>"`)
- When adding a new locale, follow the step-by-step guide in `docs/Developer Guide/Developer Guide/Concepts/Internationalisation Translations/Adding a new locale.md`
#### Client vs Server Translation Usage
- **Client-side**: `import { t } from "../services/i18n"` with keys in `apps/client/src/translations/en/translation.json`
- **Server-side**: `import { t } from "i18next"` with keys in `apps/server/src/assets/translations/en/server.json`
- **Interpolation**: Use `{{variable}}` for normal interpolation; use `{{- variable}}` (with hyphen) for **unescaped** interpolation when the value contains special characters like quotes that shouldn't be HTML-escaped
### Storing User Preferences
- **Do not use `localStorage`** for user preferences — Trilium has a synced options system that persists across devices
- To add a new user preference:
1. Add the option type to `OptionDefinitions` in `packages/commons/src/lib/options_interface.ts`
2. Add a default value in `apps/server/src/services/options_init.ts` in the `defaultOptions` array
3. **Whitelist the option** in `apps/server/src/routes/api/options.ts` by adding it to `ALLOWED_OPTIONS` (required for client updates)
4. Use `useTriliumOption("optionName")` hook in React components to read/write the option
- Available hooks: `useTriliumOption` (string), `useTriliumOptionBool`, `useTriliumOptionInt`, `useTriliumOptionJson`
- See `docs/Developer Guide/Developer Guide/Concepts/Options/Creating a new option.md` for detailed documentation
## Testing Conventions
- **Write concise tests**: Group related assertions together in a single test case rather than creating many one-shot tests
- **Extract and test business logic**: When adding pure business logic (e.g., data transformations, migrations, validations), extract it as a separate function and always write unit tests for it
```typescript
// ETAPI test pattern
describe("etapi/feature", () => {

View File

@@ -1,44 +0,0 @@
name: Claude Code Review
on:
pull_request:
types: [opened, synchronize, ready_for_review, reopened]
# Optional: Only run on specific file changes
# paths:
# - "src/**/*.ts"
# - "src/**/*.tsx"
# - "src/**/*.js"
# - "src/**/*.jsx"
jobs:
claude-review:
# Optional: Filter by PR author
# if: |
# github.event.pull_request.user.login == 'external-contributor' ||
# github.event.pull_request.user.login == 'new-developer' ||
# github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
issues: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 1
- name: Run Claude Code Review
id: claude-review
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
plugin_marketplaces: 'https://github.com/anthropics/claude-code.git'
plugins: 'code-review@claude-code-plugins'
prompt: '/code-review:code-review ${{ github.repository }}/pull/${{ github.event.pull_request.number }}'
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options

View File

@@ -1,50 +0,0 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
issues: write
id-token: write
actions: read # Required for Claude to read CI results on PRs
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
# This is an optional setting that allows Claude to read CI results on PRs
additional_permissions: |
actions: read
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
# prompt: 'Update the pull request description to include a summary of changes.'
# Optional: Add claude_args to customize behavior and configuration
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options
# claude_args: '--allowed-tools Bash(gh pr *)'

View File

@@ -51,9 +51,6 @@ jobs:
- name: Install Dependencies
run: pnpm install --frozen-lockfile
- name: Update build info
run: pnpm run chore:update-build-info
- name: Trigger build of app
run: pnpm --filter=client-standalone build

View File

@@ -45,7 +45,7 @@ jobs:
uses: actions/checkout@v6
- name: Setup pnpm
uses: pnpm/action-setup@v6
uses: pnpm/action-setup@v5
- name: Setup Node.js
uses: actions/setup-node@v6

View File

@@ -1,15 +1,9 @@
name: Dev
on:
push:
branches:
- main
- standalone
- "release/*"
branches: [ main, standalone ]
pull_request:
branches:
- main
- standalone
- "release/*"
branches: [ main, standalone ]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -32,7 +26,7 @@ jobs:
- name: Checkout the repository
uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -65,20 +59,13 @@ jobs:
path: apps/server/test-output/vitest/html/
retention-days: 30
- name: Run the client-standalone tests
# Runs the same trilium-core spec set as the server suite, but in
# happy-dom + sql.js WASM via BrowserSqlProvider (see
# apps/client-standalone/src/test_setup.ts). Catches differences
# between the Node-side and browser-side runtimes.
run: pnpm run --filter=client-standalone test
- name: Run CKEditor e2e tests
run: |
pnpm run --filter=ckeditor5-mermaid test
pnpm run --filter=ckeditor5-math test
- name: Run the rest of the tests
run: pnpm run --filter=\!client --filter=\!client-standalone --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test
run: pnpm run --filter=\!client --filter=\!server --filter=\!ckeditor5-mermaid --filter=\!ckeditor5-math test
build_docker:
name: Build Docker image
@@ -87,7 +74,7 @@ jobs:
- test_dev
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Update build info
@@ -122,7 +109,7 @@ jobs:
- name: Checkout the repository
uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Install dependencies
run: pnpm install --frozen-lockfile

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:

View File

@@ -2,7 +2,6 @@ on:
push:
branches:
- "main"
- "standalone"
- "feature/update**"
- "feature/server_esm**"
paths-ignore:
@@ -43,7 +42,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v4
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -83,7 +82,7 @@ jobs:
require-healthy: true
- name: Run Playwright tests
run: TRILIUM_DOCKER=1 TRILIUM_PORT=8082 pnpm --filter=server e2e
run: TRILIUM_DOCKER=1 TRILIUM_PORT=8082 pnpm --filter=server-e2e e2e
- name: Upload Playwright trace
if: failure()
@@ -143,7 +142,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:

View File

@@ -1,57 +0,0 @@
name: Mobile
on:
push:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build_android:
name: Build Android APK
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- name: Set up Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: "pnpm"
- name: Set up JDK 21
uses: actions/setup-java@v5
with:
distribution: temurin
java-version: 21
- name: Set up Gradle
uses: gradle/actions/setup-gradle@v5
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Update build info
run: pnpm run chore:update-build-info
- name: Build client-standalone (webDir for Capacitor)
run: pnpm --filter @triliumnext/mobile build
- name: Sync Capacitor Android project
run: pnpm --filter @triliumnext/mobile exec cap sync android
- name: Assemble debug APK
working-directory: apps/mobile/android
run: ./gradlew assembleDebug --no-daemon
- name: Upload APK
uses: actions/upload-artifact@v7
with:
name: trilium-mobile-debug-apk
path: apps/mobile/android/app/build/outputs/apk/debug/*.apk
retention-days: 14

View File

@@ -42,7 +42,7 @@ jobs:
shell: bash
forge_platform: linux
- name: windows
image: windows-latest
image: win-signing
shell: cmd
forge_platform: win32
# Exclude ARM64 Linux from default matrix to use native runner
@@ -61,7 +61,7 @@ jobs:
runs-on: ${{ matrix.os.image }}
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -69,8 +69,6 @@ jobs:
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile
env:
npm_config_package_import_method: copy
- name: Update nightly version
run: pnpm run chore:ci-update-nightly-version
- name: Run the build
@@ -88,10 +86,12 @@ jobs:
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }}
WINDOWS_SIGN_EXECUTABLE: ${{ vars.WINDOWS_SIGN_EXECUTABLE }}
WINDOWS_SIGN_ERROR_LOG: ${{ vars.WINDOWS_SIGN_ERROR_LOG }}
GPG_SIGNING_KEY: ${{ secrets.GPG_SIGN_KEY }}
- name: Publish release
uses: softprops/action-gh-release@v3.0.0
uses: softprops/action-gh-release@v2.6.1
if: ${{ github.event_name != 'pull_request' }}
with:
make_latest: false
@@ -132,7 +132,7 @@ jobs:
arch: ${{ matrix.arch }}
- name: Publish release
uses: softprops/action-gh-release@v3.0.0
uses: softprops/action-gh-release@v2.6.1
if: ${{ github.event_name != 'pull_request' }}
with:
make_latest: false

View File

@@ -14,7 +14,7 @@ permissions:
contents: read
jobs:
e2e-server:
e2e:
strategy:
fail-fast: false
matrix:
@@ -38,7 +38,7 @@ jobs:
filter: tree:0
fetch-depth: 0
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- uses: actions/setup-node@v6
with:
node-version: 24
@@ -73,66 +73,15 @@ jobs:
sleep 10
- name: Server end-to-end tests
run: pnpm --filter server e2e
run: pnpm --filter server-e2e e2e
- name: Upload test report
if: failure()
uses: actions/upload-artifact@v7
with:
name: e2e report ${{ matrix.arch }}
path: apps/server/test-output
path: apps/server-e2e/test-output
- name: Kill the server
if: always()
run: pkill -f trilium || true
e2e-standalone:
strategy:
fail-fast: false
matrix:
include:
- name: linux-x64
os: ubuntu-22.04
- name: linux-arm64
os: ubuntu-24.04-arm
runs-on: ${{ matrix.os }}
name: Standalone E2E tests on ${{ matrix.name }}
env:
TRILIUM_DOCKER: 1
TRILIUM_PORT: 8082
steps:
- uses: actions/checkout@v6
with:
filter: tree:0
fetch-depth: 0
- uses: pnpm/action-setup@v5
- uses: actions/setup-node@v6
with:
node-version: 24
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Install Playwright browsers
run: pnpm exec playwright install --with-deps
- name: Build standalone
run: TRILIUM_INTEGRATION_TEST=memory pnpm --filter client-standalone build
- name: Start standalone preview server
run: |
cd apps/client-standalone
pnpm vite preview --port $TRILIUM_PORT --host 127.0.0.1 &
sleep 5
- name: Standalone end-to-end tests
run: pnpm --filter client-standalone e2e
- name: Upload test report
if: failure()
uses: actions/upload-artifact@v7
with:
name: standalone e2e report ${{ matrix.name }}
path: apps/client-standalone/test-output

View File

@@ -17,7 +17,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -66,7 +66,7 @@ jobs:
runs-on: ${{ matrix.os.image }}
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -150,7 +150,7 @@ jobs:
path: upload
- name: Publish stable release
uses: softprops/action-gh-release@v3.0.0
uses: softprops/action-gh-release@v2.6.1
with:
draft: false
body_path: docs/Release Notes/Release Notes/${{ github.ref_name }}.md

View File

@@ -32,7 +32,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:
@@ -58,7 +58,7 @@ jobs:
compression-level: 0
- name: Release web clipper extension
uses: softprops/action-gh-release@v3.0.0
uses: softprops/action-gh-release@v2.6.1
if: ${{ startsWith(github.ref, 'refs/tags/web-clipper-v') }}
with:
draft: false

View File

@@ -26,7 +26,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: pnpm/action-setup@v6
- uses: pnpm/action-setup@v5
- name: Set up node & dependencies
uses: actions/setup-node@v6
with:

118
.mailmap
View File

@@ -1,116 +1,2 @@
# Format: Canonical Name <canonical-email> <commit-email>
# Merges aliases so `git shortlog`, `git log --use-mailmap`, etc. group commits per person.
# Core maintainers
zadam <zadam.apps@gmail.com>
zadam <zadam.apps@gmail.com> <adam.zivner@gmail.com>
zadam <zadam.apps@gmail.com> <adam.zivner@gemalto.com>
Elian Doran <contact@eliandoran.me>
Elian Doran <contact@eliandoran.me> <online@eliandoran.me>
Adorian Doran <adorian@esevo.ro>
Adorian Doran <adorian@esevo.ro> <adoriandoran@gmail.com>
# Contributors with multiple emails / name variants
Panagiotis Papadopoulos <pano_90@gmx.net> <102623907+pano9000@users.noreply.github.com>
Jon Fuller <jonfuller2012@gmail.com>
SiriusXT <1160925501@qq.com>
SiriusXT <1160925501@qq.com> <11609255001@qq.com>
SiriusXT <1160925501@qq.com> <37627919+SiriusXT@users.noreply.github.com>
JYC333 <22962980+JYC333@users.noreply.github.com>
JYC333 <22962980+JYC333@users.noreply.github.com> <yuchuanjin333@gmail.com>
Nriver <6752679+Nriver@users.noreply.github.com>
Francis C. <normitomf@gmail.com>
Francis C. <normitomf@gmail.com> <francistw@users.noreply.github.com>
Thomas Frei <7283497+thfrei@users.noreply.github.com>
hasecilu <hasecilu@tuta.io>
meinzzzz <lukas.geiselhart35@gmail.com>
FliegendeWurst <arne.keller@posteo.de>
FliegendeWurst <arne.keller@posteo.de> <2012gdwu@web.de>
FliegendeWurst <arne.keller@posteo.de> <2012gdwu+github@posteo.de>
MeIchthys <github.com@meichthys.com>
MeIchthys <github.com@meichthys.com> <10717998+meichthys@users.noreply.github.com>
Marcel Wiechmann <marcel.wiechmann@gmail.com>
Marcel Wiechmann <marcel.wiechmann@gmail.com> <github.y3y0w@sl.wiechmann.at>
Tomas Adamek <ad.tomik@seznam.cz>
Tomas Adamek <ad.tomik@seznam.cz> <50672285+Kureii@users.noreply.github.com>
soulsands <407221377@qq.com>
chesspro13 <chesspro13@gmail.com>
sigaloid <69441971+sigaloid@users.noreply.github.com>
Marek Lewandowski <m.lewandowski@cksource.com>
Marek Lewandowski <m.lewandowski@cksource.com> <code@mlewandowski.com>
Marek Lewandowski <m.lewandowski@cksource.com> <mlewand@users.noreply.github.com>
lzinga <lucas.elzinga@outlook.com>
lzinga <lucas.elzinga@outlook.com> <lzinga@users.noreply.github.com>
Sukant Gujar <sukantgujar@yahoo.com>
Matt Wilkie <maphew@gmail.com>
Matt Wilkie <maphew@gmail.com> <matt.wilkie@yukon.ca>
Andreas Haan <andreas.mobil1@googlemail.com>
Potjoe-97 <42873357+Potjoe-97@users.noreply.github.com>
Potjoe-97 <42873357+Potjoe-97@users.noreply.github.com> <giann@LAPTOPT490-GF>
Alex Pietsch <54153428+alexpietsch@users.noreply.github.com>
Laurent Cozic <laurent@cozic.net>
Laurent Cozic <laurent@cozic.net> <laurent22@users.noreply.github.com>
Zexin Yuan <git@yzx9.xyz>
Zexin Yuan <git@yzx9.xyz> <yuan.zx@outlook.com>
hulmgulm <hulmgulm@users.noreply.github.com>
hulmgulm <hulmgulm@users.noreply.github.com> <12165268+hulmgulm@users.noreply.github.com>
hulmgulm <hulmgulm@users.noreply.github.com> <github@hulmgulm.de>
Jules Bertholet <jules.bertholet@gmail.com>
Charles Dagenais <dagenais.charles@gmail.com>
Giulia Ye <yg97.cs@gmail.com>
baddate <37013819+baddate@users.noreply.github.com>
DerVogel101 <128903814+DerVogel101@users.noreply.github.com>
DerVogel101 <128903814+DerVogel101@users.noreply.github.com> <jan.irmer@outlook.de>
Marcello Fuschi <marcellofuschi1@gmail.com>
Jiahao Lee <lijiahao34@live.com>
Dmitry Matveyev <dev@greenfork.me>
Dmitry Matveyev <dev@greenfork.me> <info@greenfork.me>
Grant Zhu <a1065135230@gmail.com>
Sylvain Pasche <sylvain.pasche@gmail.com>
Sylvain Pasche <sylvain.pasche@gmail.com> <spasche@spasche.net>
mm21 <8033134+mm21@users.noreply.github.com>
mm21 <8033134+mm21@users.noreply.github.com> <mm21.dev@gmail.com>
BeatLink <git@beatlink.simplelogin.com>
BeatLink <git@beatlink.simplelogin.com> <github@beatlink.simplelogin.com>
Florian Meißner <161936+Mystler@users.noreply.github.com>
Florian Meißner <161936+Mystler@users.noreply.github.com> <developer@mystler.eu>
zadam <adam.zivner@gmail.com>
zadam <zadam.apps@gmail.com>

View File

@@ -1,8 +0,0 @@
{
"mcpServers": {
"trilium": {
"type": "http",
"url": "http://localhost:8080/mcp"
}
}
}

2
.nvmrc
View File

@@ -1 +1 @@
24.15.0
24.14.1

126
CLAUDE.md
View File

@@ -2,8 +2,6 @@
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
> **Note**: When updating this file, also update `.github/copilot-instructions.md` to keep both AI coding assistants in sync.
## Overview
Trilium Notes is a hierarchical note-taking application with synchronization, scripting, and rich text editing. TypeScript monorepo using pnpm with multiple apps and shared packages.
@@ -59,6 +57,7 @@ apps/
desktop/ # Electron (bundles server + client)
client-standalone/ # Standalone client (WASM + service workers, no Node.js)
standalone-desktop/ # Standalone desktop variant
server-e2e/ # Playwright E2E tests for server
web-clipper/ # Browser extension
website/ # Project website
db-compare/, dump-db/, edit-docs/, build-docs/, icon-pack-builder/
@@ -66,7 +65,6 @@ apps/
packages/
trilium-core/ # Core business logic: entities, services, SQL, sync
commons/ # Shared interfaces and utilities
trilium-e2e/ # Shared Playwright E2E tests
ckeditor5/ # Custom rich text editor bundle
codemirror/ # Code editor integration
highlightjs/ # Syntax highlighting
@@ -122,15 +120,6 @@ Frontend widgets in `apps/client/src/widgets/`:
**Widget lifecycle**: `doRenderBody()` for initial render, `refreshWithNote()` for note changes, `entitiesReloadedEvent({loadResults})` for entity updates. Uses jQuery — don't mix React patterns.
#### Reusable Preact Components
Common UI components are available in `apps/client/src/widgets/react/` — prefer reusing these over creating custom implementations:
- `NoItems` - Empty state placeholder with icon and message (use for "no results", "too many items", error states)
- `ActionButton` - Consistent button styling with icon support
- `FormTextBox` - Text input with validation and controlled input handling
- `Slider` - Range slider with label
- `Checkbox`, `RadioButton` - Form controls
- `CollapsibleSection` - Expandable content sections
Fluent builder pattern: `.child()`, `.class()`, `.css()` chaining with position-based ordering.
### API Architecture
@@ -156,17 +145,6 @@ Fluent builder pattern: `.child()`, `.class()`, `.css()` chaining with position-
- **Barrel import caution** — `import { x } from "@triliumnext/core"` loads ALL core exports. Early-loading modules like `config.ts` should import specific subpaths (e.g. `@triliumnext/core/src/services/utils/index`) to avoid circular dependencies or initialization ordering issues
- **Electron IPC** — In desktop mode, client API calls use Electron IPC (not HTTP). The IPC handler in `apps/server/src/routes/electron.ts` must be registered via `utils.isElectron` from the **server's** utils (which correctly checks `process.versions["electron"]`), not from core's utils
### Binary Utilities
Use utilities from `packages/trilium-core/src/services/utils/binary.ts` for string/buffer conversions instead of manual `TextEncoder`/`TextDecoder` or `Buffer.from()` calls:
- **`wrapStringOrBuffer(input)`** — Converts `string` to `Uint8Array`, returns `Uint8Array` unchanged. Use when a function expects `Uint8Array` but receives `string | Uint8Array`.
- **`unwrapStringOrBuffer(input)`** — Converts `Uint8Array` to `string`, returns `string` unchanged. Use when a function expects `string` but receives `string | Uint8Array`.
- **`encodeBase64(input)`** / **`decodeBase64(input)`** — Base64 encoding/decoding that works in both Node.js and browser.
- **`encodeUtf8(string)`** / **`decodeUtf8(buffer)`** — UTF-8 encoding/decoding.
Import via `import { binary_utils } from "@triliumnext/core"` or directly from the module.
### Database
SQLite via `better-sqlite3`. SQL abstraction in `packages/trilium-core/src/services/sql/` with `DatabaseProvider` interface, prepared statement caching, and transaction support.
@@ -174,60 +152,14 @@ SQLite via `better-sqlite3`. SQL abstraction in `packages/trilium-core/src/servi
- Schema: `apps/server/src/assets/db/schema.sql`
- Migrations: `apps/server/src/migrations/YYMMDD_HHMM__description.sql`
### Testing Strategy
- Server tests run sequentially due to shared database
- Client tests can run in parallel
- E2E tests use Playwright for both server and desktop apps
- Build validation tests check artifact integrity
- **Write concise tests**: Group related assertions together in a single test case rather than creating many one-shot tests
- **Extract and test business logic**: When adding pure business logic (e.g., data transformations, migrations, validations), extract it as a separate function and always write unit tests for it
### Internationalization
- Translation files in `apps/client/src/translations/`
- Supported languages: English, German, Spanish, French, Romanian, Chinese
- **Only add new translation keys to `en/translation.json`** — translations for other languages are managed via Weblate and will be contributed by the community
- Third-party components (e.g., mind-map context menu) should use i18next `t()` for their labels, with the English strings added to `en/translation.json` under a dedicated namespace (e.g., `"mind-map"`)
- When a translated string contains **interpolated components** (e.g. links, note references) whose order may vary across languages, use `<Trans>` from `react-i18next` instead of `t()`. This lets translators reorder components freely (e.g. `"<Note/> in <Parent/>"` vs `"in <Parent/>, <Note/>"`)
- When adding a new locale, follow the step-by-step guide in `docs/Developer Guide/Developer Guide/Concepts/Internationalisation Translations/Adding a new locale.md`
- **Server-side translations** (e.g. hidden subtree titles) go in `apps/server/src/assets/translations/en/server.json`, not in the client `translation.json`
#### Client vs Server Translation Usage
- **Client-side**: `import { t } from "../services/i18n"` with keys in `apps/client/src/translations/en/translation.json`
- **Server-side**: `import { t } from "i18next"` with keys in `apps/server/src/assets/translations/en/server.json`
- **Interpolation**: Use `{{variable}}` for normal interpolation; use `{{- variable}}` (with hyphen) for **unescaped** interpolation when the value contains special characters like quotes that shouldn't be HTML-escaped
### Electron Desktop App
- Desktop entry point: `apps/desktop/src/main.ts`, window management: `apps/server/src/services/window.ts`
- IPC communication: use `electron.ipcMain.on(channel, handler)` on server side, `electron.ipcRenderer.send(channel, data)` on client side
- Electron-only features should check `isElectron()` from `apps/client/src/services/utils.ts` (client) or `utils.isElectron` (server)
### Attribute Inheritance
Three inheritance mechanisms:
1. **Standard**: `note.getInheritableAttributes()` walks parent tree
2. **Child prefix**: `child:label` on parent copies to children
3. **Template relation**: `~template=templateNoteId` includes template's inheritable attributes
### Attribute Access
Use `note.getOwnedAttribute()` for direct, `note.getAttribute()` for inherited.
### Client-Side API Restrictions
- **Do not use `crypto.randomUUID()`** or other Web Crypto APIs that require secure contexts - Trilium can run over HTTP, not just HTTPS
- Use `randomString()` from `apps/client/src/services/utils.ts` for generating IDs instead
### Storing User Preferences
- **Do not use `localStorage`** for user preferences — Trilium has a synced options system that persists across devices
- To add a new user preference:
1. Add the option type to `OptionDefinitions` in `packages/commons/src/lib/options_interface.ts`
2. Add a default value in `apps/server/src/services/options_init.ts` in the `defaultOptions` array
3. **Whitelist the option** in `apps/server/src/routes/api/options.ts` by adding it to the `ALLOWED_OPTIONS` array — **without this, the API will reject changes with "Option 'X' is not allowed to be changed"**
4. If the option should be user-editable in the UI, add a control in the appropriate settings component (e.g., `apps/client/src/widgets/type_widgets/options/other.tsx`) and a translation key in `apps/client/src/translations/en/translation.json`
5. Use `useTriliumOption("optionName")` hook in React components to read/write the option
- Available hooks: `useTriliumOption` (string), `useTriliumOptionBool`, `useTriliumOptionInt`, `useTriliumOptionJson`
- See `docs/Developer Guide/Developer Guide/Concepts/Options/Creating a new option.md` for detailed documentation
### Shared Types Policy
- Types shared between client and server belong in `@triliumnext/commons` (`packages/commons/src/lib/`)
- Import shared types directly from `@triliumnext/commons` - do not re-export them from app-specific modules
- Keep app-specific types (e.g., `LlmProvider` for server, `StreamCallbacks` for client) in their respective apps
## Important Patterns
@@ -249,7 +181,7 @@ Use `note.getOwnedAttribute()` for direct, `note.getAttribute()` for inherited.
- **Server tests** (`apps/server/spec/`): Vitest, must run sequentially (shared DB), forks pool, max 6 workers
- **Client tests** (`apps/client/src/`): Vitest with happy-dom environment, can run in parallel
- **E2E tests** (`packages/trilium-e2e/`): Shared Playwright tests, run via `pnpm --filter server e2e` or `pnpm --filter client-standalone e2e`
- **E2E tests** (`apps/server-e2e/`): Playwright, Chromium, server started automatically on port 8082
- **ETAPI tests** (`apps/server/spec/etapi/`): External API contract tests
## Documentation
@@ -266,55 +198,3 @@ Use `note.getOwnedAttribute()` for direct, `note.getAttribute()` for inherited.
- `apps/client/src/services/froca.ts` — Frontend cache
- `apps/server/src/routes/routes.ts` — API route registration
- `packages/trilium-core/src/services/sql/sql.ts` — Database abstraction
### Adding Hidden System Notes
The hidden subtree (`_hidden`) contains system notes with predictable IDs (prefixed with `_`). Defined in `apps/server/src/services/hidden_subtree.ts` via the `HiddenSubtreeItem` interface from `@triliumnext/commons`.
1. Add the note definition to `buildHiddenSubtreeDefinition()` in `apps/server/src/services/hidden_subtree.ts`
2. Add a translation key for the title in `apps/server/src/assets/translations/en/server.json` under `"hidden-subtree"`
3. The note is auto-created on startup by `checkHiddenSubtree()` — uses deterministic IDs so all sync cluster instances generate the same structure
4. Key properties: `id` (must start with `_`), `title`, `type`, `icon` (format: `bx-icon-name` without `bx ` prefix), `attributes`, `children`, `content`
5. Use `enforceAttributes: true` to keep attributes in sync, `enforceBranches: true` for correct placement, `enforceDeleted: true` to remove deprecated notes
6. For launcher bar entries, see `hidden_subtree_launcherbar.ts`; for templates, see `hidden_subtree_templates.ts`
### Writing to Notes from Server Services
- `note.setContent()` requires a CLS (Continuation Local Storage) context — wrap calls in `cls.init(() => { ... })` (from `apps/server/src/services/cls.ts`)
- Operations called from Express routes already have CLS context; standalone services (schedulers, Electron IPC handlers) do not
### Adding New LLM Tools
Tools are defined using `defineTools()` in `apps/server/src/services/llm/tools/` and automatically registered for both the LLM chat and MCP server.
1. Add the tool definition in the appropriate module (`note_tools.ts`, `attribute_tools.ts`, `attachment_tools.ts`, `hierarchy_tools.ts`) or create a new module
2. Each tool needs: `description`, `inputSchema` (Zod), `execute` function, and optionally `mutates: true` for write operations
3. If creating a new module, wrap tools in `defineTools({...})` and add the registry to `allToolRegistries` in `tools/index.ts`
4. Add a client-side friendly name in `apps/client/src/translations/en/translation.json` under `llm.tools.<tool_name>` — use **imperative tense** (e.g. "Search notes", "Create note", "Get attributes"), not present continuous
5. Use ETAPI (`apps/server/src/etapi/`) as inspiration for what fields to expose, but **do not import ETAPI mappers** — inline the field mappings directly in the tool so the LLM layer stays decoupled from the API layer
### Updating PDF.js
1. Update `pdfjs-dist` version in `packages/pdfjs-viewer/package.json`
2. Run `npx tsx scripts/update-viewer.ts` from that directory
3. Run `pnpm build` to verify success
4. Commit all changes including updated viewer files
### Database Migrations
- Add migration scripts in `apps/server/src/migrations/`
- Update schema in `apps/server/src/assets/db/schema.sql`
### Server-Side Static Assets
- Static assets (templates, SQL, translations, etc.) go in `apps/server/src/assets/`
- Access them at runtime via `RESOURCE_DIR` from `apps/server/src/services/resource_dir.ts` (e.g. `path.join(RESOURCE_DIR, "llm", "skills", "file.md")`)
- **Do not use `import.meta.url`/`fileURLToPath`** to resolve file paths — the server is bundled into CJS for production, so `import.meta.url` will not point to the source directory
- **Do not use `__dirname` with relative paths** from source files — after bundling, `__dirname` points to the bundle output, not the original source tree
## MCP Server
- Trilium exposes an MCP (Model Context Protocol) server at `http://localhost:8080/mcp`, configured in `.mcp.json`
- The MCP server is **only available when the Trilium server is running** (`pnpm run server:start`)
- It provides tools for reading, searching, and modifying notes directly from the AI assistant
- Use it to interact with actual note data when developing or debugging note-related features
## Build System Notes
- Uses pnpm for monorepo management
- Vite for fast development builds
- ESBuild for production optimization
- pnpm workspaces for dependency management
- Docker support with multi-stage builds

View File

@@ -2,87 +2,13 @@
## Supported Versions
Only the latest stable minor release receives security fixes.
In the (still active) 0.X phase of the project only the latest stable minor release is getting bugfixes (including security ones).
For example, if the latest stable version is 0.92.3 and the latest beta is 0.93.0-beta, then only the 0.92.x line will receive security patches. Older versions (like 0.91.x) will not receive fixes.
So e.g. if the latest stable version is 0.42.3 and the latest beta version is 0.43.0-beta, then the 0.42.x line will still get security fixes, but older versions (like 0.41.x) won't get any fixes.
This policy may be altered on a case-by-case basis for critical vulnerabilities.
The description above is a general rule and may be altered on a case-by-case basis.
## Reporting a Vulnerability
**Please report all security vulnerabilities through [GitHub Security Advisories](https://github.com/TriliumNext/Notes/security/advisories/new).**
We do not accept security reports via email, public issues, or other channels. GitHub Security Advisories allows us to:
- Discuss and triage vulnerabilities privately
- Coordinate fixes before public disclosure
- Credit reporters appropriately
- Publish advisories with CVE identifiers
### What to Include
When reporting, please provide:
- A clear description of the vulnerability
- Steps to reproduce or proof-of-concept
- Affected versions (if known)
- Potential impact assessment
- Any suggested mitigations or fixes
### Response Timeline
- **Initial response**: Within 7 days
- **Triage decision**: Within 14 days
- **Fix timeline**: Depends on severity and complexity
## Scope
### In Scope
- Remote code execution
- Authentication/authorization bypass
- Cross-site scripting (XSS) that affects other users
- SQL injection
- Path traversal
- Sensitive data exposure
- Privilege escalation
### Out of Scope (Won't Fix)
The following are considered out of scope or accepted risks:
#### Self-XSS / Self-Injection
Trilium is a personal knowledge base where users have full control over their own data. Users can intentionally create notes containing scripts, HTML, or other executable content. This is by design - Trilium's scripting system allows users to extend functionality with custom JavaScript.
Vulnerabilities that require a user to inject malicious content into their own notes and then view it themselves are not considered security issues.
#### Electron Architecture (nodeIntegration)
Trilium's desktop application runs with `nodeIntegration: true` to enable its powerful scripting features. This is an intentional design decision, similar to VS Code extensions having full system access. We mitigate risks by:
- Sanitizing content at input boundaries
- Fixing specific XSS vectors as they're discovered
- Using Electron fuses to prevent external abuse
#### Authenticated User Actions
Actions that require valid authentication and only affect the authenticated user's own data are generally not vulnerabilities.
#### Denial of Service via Resource Exhaustion
Creating extremely large notes or performing many operations is expected user behavior in a note-taking application.
#### Missing Security Headers on Non-Sensitive Endpoints
We implement security headers where they provide meaningful protection, but may omit them on endpoints where they provide no practical benefit.
## Coordinated Disclosure
We follow a coordinated disclosure process:
1. **Report received** - We acknowledge receipt and begin triage
2. **Fix developed** - We develop and test a fix privately
3. **Release prepared** - Security release is prepared with vague changelog
4. **Users notified** - Release is published, users encouraged to upgrade
5. **Advisory published** - After reasonable upgrade window (typically 2-4 weeks), full advisory is published
We appreciate reporters allowing us time to fix issues before public disclosure. We aim to credit all reporters in published advisories unless they prefer to remain anonymous.
## Security Updates
Security fixes are released as patch versions (e.g., 0.92.1 → 0.92.2) to minimize upgrade friction. We recommend all users keep their installations up to date.
Subscribe to GitHub releases or watch the repository to receive notifications of new releases.
* For low severity vulnerabilities, they can be reported as GitHub issues.
* For severe vulnerabilities, please report it using [GitHub Security Advisories](https://github.com/TriliumNext/Trilium/security/advisories).

View File

@@ -15,16 +15,14 @@
"author": "Elian Doran <contact@eliandoran.me>",
"license": "AGPL-3.0-only",
"packageManager": "pnpm@10.33.0",
"dependencies": {
"@triliumnext/core": "workspace:*",
"@triliumnext/server": "workspace:*"
},
"devDependencies": {
"@redocly/cli": "2.28.0",
"@redocly/cli": "2.25.1",
"archiver": "7.0.1",
"fs-extra": "11.3.4",
"js-yaml": "4.1.1",
"typedoc": "0.28.19",
"typedoc-plugin-missing-exports": "4.1.3"
"react": "19.2.4",
"react-dom": "19.2.4",
"typedoc": "0.28.18",
"typedoc-plugin-missing-exports": "4.1.2"
}
}

View File

@@ -14,18 +14,21 @@
*/
export type {
AbstractBeccaEntity,
BAttachment,
BAttribute,
BBranch,
BEtapiToken,
BNote,
BOption,
BRecentNote,
BRevision
} from "@triliumnext/core";
default as AbstractBeccaEntity
} from "../../server/src/becca/entities/abstract_becca_entity.js";
export type {
default as BAttachment
} from "../../server/src/becca/entities/battachment.js";
export type { default as BAttribute } from "../../server/src/becca/entities/battribute.js";
export type { default as BBranch } from "../../server/src/becca/entities/bbranch.js";
export type { default as BEtapiToken } from "../../server/src/becca/entities/betapi_token.js";
export type { BNote };
export type { default as BOption } from "../../server/src/becca/entities/boption.js";
export type { default as BRecentNote } from "../../server/src/becca/entities/brecent_note.js";
export type { default as BRevision } from "../../server/src/becca/entities/brevision.js";
import { BNote, BackendScriptApi, type BackendScriptApiInterface as Api } from "@triliumnext/core";
import BNote from "../../server/src/becca/entities/bnote.js";
import BackendScriptApi, { type Api } from "../../server/src/services/backend_script_api.js";
export type { Api };

View File

@@ -5,43 +5,10 @@ if (!process.env.TRILIUM_RESOURCE_DIR) {
}
process.env.NODE_ENV = "development";
import { BackupService, getContext, initializeCore, type ImageProvider } from "@triliumnext/core";
import ClsHookedExecutionContext from "@triliumnext/server/src/cls_provider.js";
import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js";
import ServerPlatformProvider from "@triliumnext/server/src/platform_provider.js";
import BetterSqlite3Provider from "@triliumnext/server/src/sql_provider.js";
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
// Stub backup service for build-docs (not used, but required by initializeCore)
class StubBackupService extends BackupService {
constructor() {
super({
getOption: () => "",
getOptionBool: () => false,
setOption: () => {}
});
}
async backupNow(_name: string): Promise<string> {
throw new Error("Backup not supported in build-docs");
}
async getExistingBackups() {
return [];
}
async getBackupContent(_filePath: string): Promise<Uint8Array | null> {
return null;
}
}
// Stub image provider for build-docs (not used, but required by initializeCore)
const stubImageProvider: ImageProvider = {
getImageType: () => null,
processImage: async () => {
throw new Error("Image processing not supported in build-docs");
}
};
import cls from "@triliumnext/server/src/services/cls.js";
import archiver from "archiver";
import { execSync } from "child_process";
import { readFileSync } from "fs";
import { WriteStream } from "fs";
import * as fs from "fs/promises";
import * as fsExtra from "fs-extra";
import yaml from "js-yaml";
@@ -49,37 +16,6 @@ import { dirname, join, resolve } from "path";
import BuildContext from "./context.js";
let initialized = false;
async function initializeBuildEnvironment() {
if (initialized) return;
initialized = true;
const dbProvider = new BetterSqlite3Provider();
dbProvider.loadFromMemory();
const { serverZipExportProviderFactory } = await import("@triliumnext/server/src/services/export/zip/factory.js");
await initializeCore({
dbConfig: {
provider: dbProvider,
isReadOnly: false,
onTransactionCommit: () => {},
onTransactionRollback: () => {}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
zipExportProviderFactory: serverZipExportProviderFactory,
executionContext: new ClsHookedExecutionContext(),
platform: new ServerPlatformProvider(),
schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
translations: (await import("@triliumnext/server/src/services/i18n.js")).initializeTranslations,
getDemoArchive: async () => null,
backup: new StubBackupService(),
image: stubImageProvider
});
}
interface NoteMapping {
rootNoteId: string;
path: string;
@@ -136,8 +72,9 @@ async function exportDocs(
) {
const zipFilePath = `output-${noteId}.zip`;
try {
const { zipExportService } = await import("@triliumnext/core");
await zipExportService.exportToZipFile(noteId, format, zipFilePath, {});
const { exportToZipFile } = (await import("@triliumnext/server/src/services/export/zip.js"))
.default;
await exportToZipFile(noteId, format, zipFilePath, {});
const ignoredSet = ignoredFiles ? new Set(ignoredFiles) : undefined;
await extractZip(zipFilePath, outputPath, ignoredSet);
@@ -155,12 +92,18 @@ async function importAndExportDocs(sourcePath: string, outputSubDir: string) {
const zipName = outputSubDir || "user-guide";
const zipFilePath = `output-${zipName}.zip`;
try {
const { zipExportService, TaskContext } = await import("@triliumnext/core");
const { waitForStreamToFinish } = await import("@triliumnext/server/src/services/utils.js");
const { exportToZip } = (await import("@triliumnext/server/src/services/export/zip.js"))
.default;
const branch = note.getParentBranches()[0];
const taskContext = new TaskContext("no-progress-reporting", "export", null);
const taskContext = new (await import("@triliumnext/server/src/services/task_context.js"))
.default(
"no-progress-reporting",
"export",
null
);
const fileOutputStream = fsExtra.createWriteStream(zipFilePath);
await zipExportService.exportToZip(taskContext, branch, "share", fileOutputStream);
await exportToZip(taskContext, branch, "share", fileOutputStream);
const { waitForStreamToFinish } = await import("@triliumnext/server/src/services/utils.js");
await waitForStreamToFinish(fileOutputStream);
// Output to root directory if outputSubDir is empty, otherwise to subdirectory
@@ -174,11 +117,15 @@ async function importAndExportDocs(sourcePath: string, outputSubDir: string) {
}
async function buildDocsInner(config?: Config) {
const { sql_init, becca_loader } = await import("@triliumnext/core");
await sql_init.createInitialDatabase(true);
const i18n = await import("@triliumnext/server/src/services/i18n.js");
await i18n.initializeTranslations();
const sqlInit = (await import("../../server/src/services/sql_init.js")).default;
await sqlInit.createInitialDatabase(true);
// Wait for becca to be loaded before importing data
await becca_loader.beccaLoaded;
const beccaLoader = await import("../../server/src/becca/becca_loader.js");
await beccaLoader.beccaLoaded;
if (config) {
// Config-based build (reads from edit-docs-config.yaml)
@@ -229,14 +176,16 @@ async function buildDocsInner(config?: Config) {
export async function importData(path: string) {
const buffer = await createImportZip(path);
const { zipImportService, TaskContext, becca } = await import("@triliumnext/core");
const importService = (await import("../../server/src/services/import/zip.js")).default;
const TaskContext = (await import("../../server/src/services/task_context.js")).default;
const context = new TaskContext("no-progress-reporting", "importNotes", null);
const becca = (await import("../../server/src/becca/becca.js")).default;
const rootNote = becca.getRoot();
if (!rootNote) {
throw new Error("Missing root note for import.");
}
return await zipImportService.importZip(context, buffer, rootNote, {
return await importService.importZip(context, buffer, rootNote, {
preserveIds: true
});
}
@@ -269,16 +218,20 @@ export async function extractZip(
outputPath: string,
ignoredFiles?: Set<string>
) {
const { getZipProvider } = await import("@triliumnext/core");
await getZipProvider().readZipFile(await fs.readFile(zipFilePath), async (entry, readContent) => {
const { readZipFile, readContent } = (await import(
"@triliumnext/server/src/services/import/zip.js"
));
await readZipFile(await fs.readFile(zipFilePath), async (zip, entry) => {
// We ignore directories since they can appear out of order anyway.
if (!entry.fileName.endsWith("/") && !ignoredFiles?.has(entry.fileName)) {
const destPath = join(outputPath, entry.fileName);
const fileContent = await readContent();
const fileContent = await readContent(zip, entry);
await fsExtra.mkdirs(dirname(destPath));
await fs.writeFile(destPath, fileContent);
}
zip.readEntry();
});
}
@@ -293,12 +246,9 @@ export async function buildDocsFromConfig(configPath?: string, gitRootDir?: stri
});
}
// Initialize the build environment before using cls
await initializeBuildEnvironment();
// Trigger the actual build.
await new Promise((res, rej) => {
getContext().init(() => {
cls.init(() => {
buildDocsInner(config ?? undefined)
.catch(rej)
.then(res);
@@ -313,12 +263,9 @@ export default async function buildDocs({ gitRootDir }: BuildContext) {
cwd: gitRootDir
});
// Initialize the build environment before using cls
await initializeBuildEnvironment();
// Trigger the actual build.
await new Promise((res, rej) => {
getContext().init(() => {
cls.init(() => {
buildDocsInner()
.catch(rej)
.then(res);

View File

@@ -28,13 +28,4 @@ async function main() {
cpSync(join(context.baseDir, "user-guide/404.html"), join(context.baseDir, "404.html"));
}
// Note: forcing process.exit() because importing notes via the core triggers
// fire-and-forget async work in `notes.ts#downloadImages` (a 5s setTimeout that
// re-schedules itself via `asyncPostProcessContent`), which keeps the libuv
// event loop alive forever even after main() completes.
main()
.then(() => process.exit(0))
.catch((error) => {
console.error("Error building documentation:", error);
process.exit(1);
});
main();

View File

@@ -23,12 +23,6 @@
"eslint.config.mjs"
],
"references": [
{
"path": "../../packages/commons/tsconfig.lib.json"
},
{
"path": "../../packages/trilium-core/tsconfig.lib.json"
},
{
"path": "../server/tsconfig.app.json"
},

View File

@@ -1,7 +1,8 @@
{
"extends": "../../tsconfig.base.json",
"files": [],
"include": [],
"include": [
"scripts/**/*.ts"
],
"references": [
{
"path": "../server"

View File

@@ -1,6 +1,6 @@
{
"name": "@triliumnext/client-standalone",
"version": "0.102.2",
"version": "0.102.1",
"description": "Standalone client for TriliumNext with SQLite WASM backend",
"private": true,
"license": "AGPL-3.0-only",
@@ -8,10 +8,8 @@
"build": "cross-env NODE_OPTIONS=--max-old-space-size=4096 vite build",
"dev": "vite dev",
"test": "vitest",
"start-prod": "pnpm build && pnpm vite preview --port 8888",
"coverage": "vitest --coverage",
"e2e": "playwright test",
"start-prod-no-dir": "pnpm build && pnpm vite preview --host 127.0.0.1"
"start-prod": "pnpm build && pnpm http-server dist -p 8888",
"coverage": "vitest --coverage"
},
"dependencies": {
"@excalidraw/excalidraw": "0.18.0",
@@ -25,7 +23,7 @@
"@mermaid-js/layout-elk": "0.2.1",
"@mind-elixir/node-menu": "5.0.1",
"@popperjs/core": "2.11.8",
"@preact/signals": "2.9.0",
"@preact/signals": "2.8.2",
"@sqlite.org/sqlite-wasm": "3.51.1-build2",
"@triliumnext/ckeditor5": "workspace:*",
"@triliumnext/codemirror": "workspace:*",
@@ -34,7 +32,7 @@
"@triliumnext/highlightjs": "workspace:*",
"@triliumnext/share-theme": "workspace:*",
"@triliumnext/split.js": "workspace:*",
"@zumer/snapdom": "2.8.0",
"@zumer/snapdom": "2.6.0",
"autocomplete.js": "0.38.1",
"bootstrap": "5.3.8",
"boxicons": "2.1.4",
@@ -42,32 +40,28 @@
"color": "5.0.3",
"debounce": "3.0.0",
"draggabilly": "3.0.0",
"fflate": "0.8.2",
"force-graph": "1.51.2",
"globals": "17.4.0",
"i18next": "26.0.4",
"i18next-http-backend": "3.0.4",
"aes-js": "3.1.2",
"i18next": "25.10.10",
"i18next-http-backend": "3.0.2",
"jquery": "4.0.0",
"jquery.fancytree": "2.38.5",
"js-md5": "0.8.3",
"js-sha1": "0.7.0",
"js-sha256": "0.11.1",
"js-sha512": "0.9.0",
"scrypt-js": "3.0.1",
"jsplumb": "2.15.6",
"katex": "0.16.45",
"katex": "0.16.43",
"knockout": "3.5.1",
"leaflet": "1.9.4",
"leaflet-gpx": "2.2.0",
"mark.js": "8.11.1",
"marked": "18.0.0",
"mermaid": "11.14.0",
"mind-elixir": "5.10.0",
"marked": "17.0.5",
"mermaid": "11.13.0",
"mind-elixir": "5.9.3",
"normalize.css": "8.0.1",
"panzoom": "9.4.4",
"preact": "10.29.1",
"react-i18next": "17.0.2",
"preact": "10.29.0",
"react-i18next": "16.6.6",
"react-window": "2.2.7",
"reveal.js": "6.0.0",
"svg-pan-zoom": "3.6.2",
@@ -75,7 +69,6 @@
"vanilla-js-wheel-zoom": "9.0.4"
},
"devDependencies": {
"@types/aes-js": "3.1.4",
"@ckeditor/ckeditor5-inspector": "5.0.0",
"@preact/preset-vite": "2.10.2",
"@types/bootstrap": "5.2.10",
@@ -87,8 +80,8 @@
"@types/tabulator-tables": "6.3.1",
"copy-webpack-plugin": "14.0.0",
"cross-env": "7.0.3",
"happy-dom": "20.8.9",
"happy-dom": "20.8.8",
"script-loader": "0.7.2",
"vite-plugin-static-copy": "4.0.1"
"vite-plugin-static-copy": "3.4.0"
}
}

View File

@@ -1,20 +0,0 @@
// Playwright configuration for the standalone (browser/WASM) client E2E suite.
import { createBaseConfig } from "../../packages/trilium-e2e/src/base-config";

// Port for the preview server; overridable via env for CI matrix runs.
const port = process.env["TRILIUM_PORT"] ?? "8082";
// NOTE(review): `||` (not `??`) means an empty BASE_URL also falls back to
// localhost — presumably intentional for env vars; confirm before changing.
const baseURL = process.env["BASE_URL"] || `http://127.0.0.1:${port}`;

export default createBaseConfig({
    appDir: __dirname,
    projectName: "standalone",
    // Run serially — presumably because standalone tests share browser-side
    // state; TODO confirm before raising.
    workers: 1,
    // When TRILIUM_DOCKER is set, the server is managed externally, so no
    // webServer is started by Playwright.
    webServer: !process.env.TRILIUM_DOCKER ? {
        command: `pnpm build && pnpm vite preview --host 127.0.0.1 --port ${port}`,
        url: baseURL,
        env: {
            TRILIUM_INTEGRATION_TEST: "memory"
        },
        // Reuse an already-running dev server locally, but always start fresh in CI.
        reuseExistingServer: !process.env.CI,
        cwd: __dirname,
        // Building the client can be slow; allow up to 5 minutes for startup.
        timeout: 5 * 60 * 1000
    } : undefined,
});

View File

@@ -1,156 +0,0 @@
import type { DatabaseBackup } from "@triliumnext/commons";
import { BackupOptionsService, BackupService, getSql } from "@triliumnext/core";
// Name of the OPFS directory that holds all database backups.
const BACKUP_DIR_NAME = "backups";
// Backup files are named "backup-<name>.db"; anything else in the directory is ignored.
const BACKUP_FILE_PATTERN = /^backup-.*\.db$/;
/**
* Standalone backup service using OPFS (Origin Private File System).
* Stores database backups as serialized byte arrays in OPFS.
* Falls back to no-op behavior when OPFS is not available (e.g., in tests).
*/
export default class StandaloneBackupService extends BackupService {
    /** Lazily-created handle to the OPFS backup directory. */
    private backupDir: FileSystemDirectoryHandle | null = null;

    /** Memoized result of the OPFS availability probe; `null` until first checked. */
    private opfsAvailable: boolean | null = null;

    constructor(options: BackupOptionsService) {
        super(options);
    }

    /**
     * Probes (once) whether OPFS is usable in the current environment.
     * Subsequent calls return the cached result.
     */
    private isOpfsAvailable(): boolean {
        if (this.opfsAvailable === null) {
            // Coerce to a strict boolean: without `!!`, a missing
            // `navigator.storage` would store `undefined` here, which violates
            // the declared `boolean | null` type and defeats the `=== null`
            // memoization check above, re-running the probe on every call.
            this.opfsAvailable = !!(typeof navigator !== "undefined"
                && navigator.storage
                && typeof navigator.storage.getDirectory === "function");
        }
        return this.opfsAvailable;
    }

    /**
     * Returns the backup directory handle, creating it on first use.
     * Returns `null` when OPFS is not available.
     */
    private async ensureBackupDirectory(): Promise<FileSystemDirectoryHandle | null> {
        if (!this.isOpfsAvailable()) {
            return null;
        }

        if (!this.backupDir) {
            const root = await navigator.storage.getDirectory();
            this.backupDir = await root.getDirectoryHandle(BACKUP_DIR_NAME, { create: true });
        }

        return this.backupDir;
    }

    /**
     * Serializes the current database and writes it to OPFS as `backup-<name>.db`.
     *
     * Always resolves with the virtual backup path, even on failure — backup
     * errors are logged but deliberately never propagated, so they cannot block
     * the operation that triggered the backup.
     *
     * @param name suffix used to build the backup file name
     * @returns the virtual path of the (attempted) backup file
     */
    override async backupNow(name: string): Promise<string> {
        const fileName = `backup-${name}.db`;

        // Check if OPFS is available
        if (!this.isOpfsAvailable()) {
            console.warn(`[Backup] OPFS not available, skipping backup: ${fileName}`);
            return `/${BACKUP_DIR_NAME}/${fileName}`;
        }

        try {
            const dir = await this.ensureBackupDirectory();
            if (!dir) {
                console.warn(`[Backup] Backup directory not available, skipping: ${fileName}`);
                return `/${BACKUP_DIR_NAME}/${fileName}`;
            }

            // Serialize the database
            const data = getSql().serialize();

            // Write to OPFS
            const fileHandle = await dir.getFileHandle(fileName, { create: true });
            const writable = await fileHandle.createWritable();
            await writable.write(data);
            await writable.close();

            console.log(`[Backup] Created backup: ${fileName} (${data.byteLength} bytes)`);
            return `/${BACKUP_DIR_NAME}/${fileName}`;
        } catch (error) {
            console.error(`[Backup] Failed to create backup ${fileName}:`, error);
            // Don't throw - backup failure shouldn't block operations
            return `/${BACKUP_DIR_NAME}/${fileName}`;
        }
    }

    /**
     * Lists all backups found in the OPFS backup directory, newest first.
     * Returns an empty array when OPFS is unavailable or listing fails.
     */
    override async getExistingBackups(): Promise<DatabaseBackup[]> {
        if (!this.isOpfsAvailable()) {
            return [];
        }

        try {
            const dir = await this.ensureBackupDirectory();
            if (!dir) {
                return [];
            }

            const backups: DatabaseBackup[] = [];
            for await (const [name, handle] of dir.entries()) {
                // Skip subdirectories and files that don't match the backup naming scheme.
                if (handle.kind !== "file" || !BACKUP_FILE_PATTERN.test(name)) {
                    continue;
                }

                const file = await (handle as FileSystemFileHandle).getFile();
                backups.push({
                    fileName: name,
                    filePath: `/${BACKUP_DIR_NAME}/${name}`,
                    mtime: new Date(file.lastModified)
                });
            }

            // Sort by modification time, newest first
            backups.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
            return backups;
        } catch (error) {
            console.error("[Backup] Failed to list backups:", error);
            return [];
        }
    }

    /**
     * Delete a backup by filename.
     * Failures (including a missing file) are logged and swallowed.
     */
    async deleteBackup(fileName: string): Promise<void> {
        if (!this.isOpfsAvailable()) {
            return;
        }

        try {
            const dir = await this.ensureBackupDirectory();
            if (!dir) {
                return;
            }

            await dir.removeEntry(fileName);
            console.log(`[Backup] Deleted backup: ${fileName}`);
        } catch (error) {
            console.error(`[Backup] Failed to delete backup ${fileName}:`, error);
        }
    }

    /**
     * Reads a backup's raw bytes given its virtual path (e.g. "/backups/backup-now.db").
     * Returns `null` when OPFS is unavailable, the path doesn't match the backup
     * naming scheme, or reading fails.
     */
    override async getBackupContent(filePath: string): Promise<Uint8Array | null> {
        if (!this.isOpfsAvailable()) {
            return null;
        }

        try {
            const dir = await this.ensureBackupDirectory();
            if (!dir) {
                return null;
            }

            // Extract fileName from filePath (e.g., "/backups/backup-now.db" -> "backup-now.db")
            const fileName = filePath.split("/").pop();
            if (!fileName || !BACKUP_FILE_PATTERN.test(fileName)) {
                return null;
            }

            const fileHandle = await dir.getFileHandle(fileName);
            const file = await fileHandle.getFile();
            const data = await file.arrayBuffer();
            return new Uint8Array(data);
        } catch (error) {
            console.error(`[Backup] Failed to get backup content ${filePath}:`, error);
            return null;
        }
    }
}

View File

@@ -5,12 +5,6 @@
import { getContext, routes } from "@triliumnext/core";
export interface UploadedFile {
originalname: string;
mimetype: string;
buffer: Uint8Array;
}
export interface BrowserRequest {
method: string;
url: string;
@@ -19,7 +13,6 @@ export interface BrowserRequest {
query: Record<string, string | undefined>;
headers?: Record<string, string>;
body?: unknown;
file?: UploadedFile;
}
export interface BrowserResponse {
@@ -161,9 +154,8 @@ export class BrowserRouter {
const query = parseQuery(url.search);
const upperMethod = method.toUpperCase();
// Parse body based on content-type
// Parse JSON body if it's an ArrayBuffer and content-type suggests JSON
let parsedBody = body;
let uploadedFile: UploadedFile | undefined;
if (body instanceof ArrayBuffer && headers) {
const contentType = headers['content-type'] || headers['Content-Type'] || '';
if (contentType.includes('application/json')) {
@@ -174,31 +166,9 @@ export class BrowserRouter {
}
} catch (e) {
console.warn('[Router] Failed to parse JSON body:', e);
// Keep original body if JSON parsing fails
parsedBody = body;
}
} else if (contentType.includes('multipart/form-data')) {
try {
// Reconstruct a Response so we can use the native FormData parser
const response = new Response(body, { headers: { 'content-type': contentType } });
const formData = await response.formData();
const formFields: Record<string, string> = {};
for (const [key, value] of formData.entries()) {
if (typeof value === 'string') {
formFields[key] = value;
} else {
// File field (Blob) — multer uses the field name "upload"
const fileBuffer = new Uint8Array(await value.arrayBuffer());
uploadedFile = {
originalname: value.name,
mimetype: value.type || 'application/octet-stream',
buffer: fileBuffer
};
}
}
parsedBody = formFields;
} catch (e) {
console.warn('[Router] Failed to parse multipart body:', e);
}
}
}
// Find matching route
@@ -221,8 +191,7 @@ export class BrowserRouter {
params,
query,
headers: headers ?? {},
body: parsedBody,
file: uploadedFile
body: parsedBody
};
try {

View File

@@ -35,7 +35,6 @@ function toExpressLikeReq(req: BrowserRequest) {
body: req.body,
headers: req.headers ?? {},
method: req.method,
file: req.file,
get originalUrl() { return req.url; }
};
}
@@ -122,51 +121,11 @@ function createRoute(router: BrowserRouter) {
};
}
/**
* Async variant of createRoute for handlers that return Promises (e.g. import).
* Uses transactionalAsync (manual BEGIN/COMMIT/ROLLBACK) instead of the synchronous
* transactional() wrapper, which would commit an empty transaction immediately when
* passed an async callback.
*/
function createAsyncRoute(router: BrowserRouter) {
return (method: HttpMethod, path: string, _middleware: any[], handler: (req: any, res: any) => Promise<unknown>, resultHandler?: ((req: any, res: any, result: unknown) => unknown) | null) => {
router.register(method, path, (req: BrowserRequest) => {
return getContext().init(async () => {
setContextFromHeaders(req);
const expressLikeReq = toExpressLikeReq(req);
const mockRes = createMockExpressResponse();
const result = await getSql().transactionalAsync(() => handler(expressLikeReq, mockRes));
// If the handler used the mock response (e.g. image routes that call res.send()),
// return it as a raw response so BrowserRouter doesn't JSON-serialize it.
if (mockRes._used) {
return {
[RAW_RESPONSE]: true as const,
status: mockRes._status,
headers: mockRes._headers,
body: mockRes._body
};
}
if (resultHandler) {
// Create a minimal response object that captures what apiResultHandler sets.
const res = createResultHandlerResponse();
resultHandler(expressLikeReq, res, result);
return res.result;
}
return result;
});
});
};
}
/**
* Creates a mock Express response object that captures calls to set(), send(), sendStatus(), etc.
* Used for route handlers (like image routes) that write directly to the response.
*/
function createMockExpressResponse() {
const chunks: string[] = [];
const res = {
_used: false,
_status: 200,
@@ -180,10 +139,6 @@ function createMockExpressResponse() {
res._headers[name] = value;
return res;
},
removeHeader(name: string) {
delete res._headers[name];
return res;
},
status(code: number) {
res._status = code;
return res;
@@ -197,15 +152,6 @@ function createMockExpressResponse() {
res._used = true;
res._status = code;
return res;
},
write(chunk: string) {
chunks.push(chunk);
return true;
},
end() {
res._used = true;
res._body = chunks.join("");
return res;
}
};
return res;
@@ -273,7 +219,7 @@ export function registerRoutes(router: BrowserRouter): void {
const apiRoute = createApiRoute(router, true);
routes.buildSharedApiRoutes({
route: createRoute(router),
asyncRoute: createAsyncRoute(router),
asyncRoute: createRoute(router),
apiRoute,
asyncApiRoute: createApiRoute(router, false),
apiResultHandler,
@@ -281,9 +227,7 @@ export function registerRoutes(router: BrowserRouter): void {
checkApiAuthOrElectron: noopMiddleware,
checkAppNotInitialized,
checkCredentials: noopMiddleware,
loginRateLimiter: noopMiddleware,
uploadMiddlewareWithErrorHandling: noopMiddleware,
csrfMiddleware: noopMiddleware
loginRateLimiter: noopMiddleware
});
apiRoute('get', '/bootstrap', bootstrapRoute);

View File

@@ -1,31 +1,25 @@
import type { Cipher, CryptoProvider, ScryptOptions } from "@triliumnext/core";
import { binary_utils } from "@triliumnext/core";
import type { CryptoProvider } from "@triliumnext/core";
import { sha1 } from "js-sha1";
import { sha256 } from "js-sha256";
import { sha512 } from "js-sha512";
import { md5 } from "js-md5";
import { scrypt } from "scrypt-js";
import aesjs from "aes-js";
interface Cipher {
update(data: Uint8Array): Uint8Array;
final(): Uint8Array;
}
const CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
/**
* Crypto provider for browser environments using pure JavaScript crypto libraries.
* Uses aes-js for synchronous AES encryption (matching Node.js behavior).
* Crypto provider for browser environments using the Web Crypto API.
*/
export default class BrowserCryptoProvider implements CryptoProvider {
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
const data = binary_utils.unwrapStringOrBuffer(content);
createHash(algorithm: "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
const data = typeof content === "string" ? content :
new TextDecoder().decode(content);
let hexHash: string;
if (algorithm === "md5") {
hexHash = md5(data);
} else if (algorithm === "sha1") {
hexHash = sha1(data);
} else {
hexHash = sha512(data);
}
const hexHash = algorithm === "sha1" ? sha1(data) : sha512(data);
// Convert hex string to Uint8Array
const bytes = new Uint8Array(hexHash.length / 2);
@@ -36,11 +30,13 @@ export default class BrowserCryptoProvider implements CryptoProvider {
}
createCipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher {
return new AesJsCipher(algorithm, key, iv, "encrypt");
// Web Crypto API doesn't support streaming cipher like Node.js
// We need to implement a wrapper that collects data and encrypts on final()
return new WebCryptoCipher(algorithm, key, iv, "encrypt");
}
createDecipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher {
return new AesJsCipher(algorithm, key, iv, "decrypt");
return new WebCryptoCipher(algorithm, key, iv, "decrypt");
}
randomBytes(size: number): Uint8Array {
@@ -59,8 +55,8 @@ export default class BrowserCryptoProvider implements CryptoProvider {
}
hmac(secret: string | Uint8Array, value: string | Uint8Array): string {
const secretStr = binary_utils.unwrapStringOrBuffer(secret);
const valueStr = binary_utils.unwrapStringOrBuffer(value);
const secretStr = typeof secret === "string" ? secret : new TextDecoder().decode(secret);
const valueStr = typeof value === "string" ? value : new TextDecoder().decode(value);
// sha256.hmac returns hex, convert to base64 to match Node's behavior
const hexHash = sha256.hmac(secretStr, valueStr);
const bytes = new Uint8Array(hexHash.length / 2);
@@ -69,50 +65,28 @@ export default class BrowserCryptoProvider implements CryptoProvider {
}
return btoa(String.fromCharCode(...bytes));
}
async scrypt(
password: Uint8Array | string,
salt: Uint8Array | string,
keyLength: number,
options: ScryptOptions = {}
): Promise<Uint8Array> {
const { N = 16384, r = 8, p = 1 } = options;
const passwordBytes = binary_utils.wrapStringOrBuffer(password);
const saltBytes = binary_utils.wrapStringOrBuffer(salt);
return scrypt(passwordBytes, saltBytes, N, r, p, keyLength);
}
constantTimeCompare(a: Uint8Array, b: Uint8Array): boolean {
if (a.length !== b.length) {
return false;
}
let result = 0;
for (let i = 0; i < a.length; i++) {
result |= a[i] ^ b[i];
}
return result === 0;
}
}
/**
* A synchronous cipher implementation using aes-js.
* Matches Node.js crypto behavior with update() and final() methods.
* A cipher implementation that wraps Web Crypto API.
* Note: This buffers all data until final() is called, which differs from
* Node.js's streaming cipher behavior.
*/
class AesJsCipher implements Cipher {
class WebCryptoCipher implements Cipher {
private chunks: Uint8Array[] = [];
private algorithm: string;
private key: Uint8Array;
private iv: Uint8Array;
private mode: "encrypt" | "decrypt";
private finalized = false;
constructor(
_algorithm: "aes-128-cbc",
algorithm: "aes-128-cbc",
key: Uint8Array,
iv: Uint8Array,
mode: "encrypt" | "decrypt"
) {
this.algorithm = algorithm;
this.key = key;
this.iv = iv;
this.mode = mode;
@@ -122,9 +96,9 @@ class AesJsCipher implements Cipher {
if (this.finalized) {
throw new Error("Cipher has already been finalized");
}
// Buffer the data - we process everything in final() to match streaming behavior
// Buffer the data - Web Crypto doesn't support streaming
this.chunks.push(data);
// Return empty array since aes-js CBC doesn't support true streaming
// Return empty array since we process everything in final()
return new Uint8Array(0);
}
@@ -134,6 +108,24 @@ class AesJsCipher implements Cipher {
}
this.finalized = true;
// Web Crypto API is async, but we need sync behavior
// This is a fundamental limitation that requires architectural changes
// For now, throw an error directing users to use async methods
throw new Error(
"Synchronous cipher finalization not available in browser. " +
"The Web Crypto API is async-only. Use finalizeAsync() instead."
);
}
/**
* Async version that actually performs the encryption/decryption.
*/
async finalizeAsync(): Promise<Uint8Array> {
if (this.finalized) {
throw new Error("Cipher has already been finalized");
}
this.finalized = true;
// Concatenate all chunks
const totalLength = this.chunks.reduce((sum, chunk) => sum + chunk.length, 0);
const data = new Uint8Array(totalLength);
@@ -143,33 +135,24 @@ class AesJsCipher implements Cipher {
offset += chunk.length;
}
if (this.mode === "encrypt") {
// PKCS7 padding for encryption
const blockSize = 16;
const paddingLength = blockSize - (data.length % blockSize);
const paddedData = new Uint8Array(data.length + paddingLength);
paddedData.set(data);
paddedData.fill(paddingLength, data.length);
// Copy key and iv to ensure they're plain ArrayBuffer-backed
const keyBuffer = new Uint8Array(this.key);
const ivBuffer = new Uint8Array(this.iv);
const aesCbc = new aesjs.ModeOfOperation.cbc(
Array.from(this.key),
Array.from(this.iv)
);
return new Uint8Array(aesCbc.encrypt(paddedData));
} else {
// Decryption
const aesCbc = new aesjs.ModeOfOperation.cbc(
Array.from(this.key),
Array.from(this.iv)
);
const decrypted = new Uint8Array(aesCbc.decrypt(data));
// Import the key
const cryptoKey = await crypto.subtle.importKey(
"raw",
keyBuffer,
{ name: "AES-CBC" },
false,
[this.mode]
);
// Remove PKCS7 padding
const paddingLength = decrypted[decrypted.length - 1];
if (paddingLength > 0 && paddingLength <= 16) {
return decrypted.slice(0, decrypted.length - paddingLength);
}
return decrypted;
}
// Perform encryption/decryption
const result = this.mode === "encrypt"
? await crypto.subtle.encrypt({ name: "AES-CBC", iv: ivBuffer }, cryptoKey, data)
: await crypto.subtle.decrypt({ name: "AES-CBC", iv: ivBuffer }, cryptoKey, data);
return new Uint8Array(result);
}
}

View File

@@ -1,168 +0,0 @@
import { FileBasedLogService, type LogFileInfo } from "@triliumnext/core";
const LOG_DIR_NAME = "logs";
const LOG_FILE_PATTERN = /^trilium-\d{4}-\d{2}-\d{2}\.log$/;
const DEFAULT_RETENTION_DAYS = 7;
/**
 * Standalone log service using OPFS (Origin Private File System).
 * Uses synchronous access handles available in service worker context.
 *
 * File logging is best-effort: if a sync access handle cannot be acquired
 * (e.g. a previous worker instance still holds it), the service falls back
 * to console-only logging rather than failing.
 */
export default class StandaloneLogService extends FileBasedLogService {
    // Handle to the OPFS log directory; populated lazily by ensureLogDirectory().
    private logDir: FileSystemDirectoryHandle | null = null;
    // Sync access handle for the log file currently being written.
    // null means file logging is unavailable and entries go to the console.
    private currentFile: FileSystemSyncAccessHandle | null = null;
    // Name of the file backing `currentFile`; "" when no file is open.
    private currentFileName: string = "";
    // Reused encoder/decoder instances for writing and reading log bytes.
    private textEncoder = new TextEncoder();
    private textDecoder = new TextDecoder();
    constructor() {
        super();
    }
    // ==================== Abstract Method Implementations ====================
    /** Line terminator appended to each log entry. */
    protected override get eol(): string {
        return "\n";
    }
    /** Create (if needed) and cache the OPFS directory that holds log files. */
    protected override async ensureLogDirectory(): Promise<void> {
        const root = await navigator.storage.getDirectory();
        this.logDir = await root.getDirectoryHandle(LOG_DIR_NAME, { create: true });
    }
    /**
     * Open `fileName` for appending via a synchronous access handle,
     * closing any previously open file first.
     *
     * Acquisition is retried with increasing delay because a terminated
     * worker may not have released its handle yet; if all attempts fail,
     * the service degrades to console-only logging instead of throwing.
     */
    protected override async openLogFile(fileName: string): Promise<void> {
        if (!this.logDir) {
            await this.ensureLogDirectory();
        }
        // Close existing file if open
        if (this.currentFile) {
            this.currentFile.close();
            this.currentFile = null;
        }
        const fileHandle = await this.logDir!.getFileHandle(fileName, { create: true });
        // Try to create sync access handle with retry logic for worker restarts
        // Previous worker may have left handle open before being terminated
        const maxRetries = 3;
        const retryDelay = 100;
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            try {
                this.currentFile = await fileHandle.createSyncAccessHandle();
                break;
            } catch (error) {
                if (attempt === maxRetries - 1) {
                    // Last attempt failed - fall back to console-only logging
                    console.warn("[LogService] Could not open log file, using console-only logging:", error);
                    this.currentFile = null;
                    this.currentFileName = "";
                    return;
                }
                // Wait before retrying - previous handle may be released
                await new Promise(resolve => setTimeout(resolve, retryDelay * (attempt + 1)));
            }
        }
        this.currentFileName = fileName;
        // Seek to end for appending
        if (this.currentFile) {
            const size = this.currentFile.getSize();
            this.currentFile.truncate(size); // No-op, but ensures we're at the right position
        }
    }
    /** Release the current sync access handle, if any, and clear file state. */
    protected override closeLogFile(): void {
        if (this.currentFile) {
            this.currentFile.close();
            this.currentFile = null;
            this.currentFileName = "";
        }
    }
    /**
     * Append one already-formatted entry to the open log file, flushing
     * immediately. Falls back to console output when no file is open.
     */
    protected override writeEntry(entry: string): void {
        if (!this.currentFile) {
            console.log(entry); // Fallback to console if file not ready
            return;
        }
        const data = this.textEncoder.encode(entry);
        const currentSize = this.currentFile.getSize();
        this.currentFile.write(data, { at: currentSize });
        this.currentFile.flush();
    }
    /**
     * Read the full contents of a log file as text.
     *
     * Only the currently open file can be read (via its sync handle);
     * other files return null. Any read error also yields null.
     */
    protected override readLogFile(fileName: string): string | null {
        if (!this.logDir) {
            return null;
        }
        try {
            // For the current file, we need to read from the sync handle
            if (fileName === this.currentFileName && this.currentFile) {
                const size = this.currentFile.getSize();
                const buffer = new ArrayBuffer(size);
                const view = new DataView(buffer);
                this.currentFile.read(view, { at: 0 });
                return this.textDecoder.decode(buffer);
            }
            // For other files, we'd need async access - return null for now
            // The current file is what's most commonly needed
            return null;
        } catch {
            return null;
        }
    }
    /**
     * Enumerate log files in the OPFS log directory.
     *
     * The modification time is derived from the date embedded in the
     * filename (trilium-YYYY-MM-DD.log), since OPFS directory entries do
     * not expose mtime directly.
     */
    protected override async listLogFiles(): Promise<LogFileInfo[]> {
        if (!this.logDir) {
            return [];
        }
        const logFiles: LogFileInfo[] = [];
        for await (const [name, handle] of this.logDir.entries()) {
            if (handle.kind !== "file" || !LOG_FILE_PATTERN.test(name)) {
                continue;
            }
            // OPFS doesn't provide mtime directly, so we parse from filename
            const match = name.match(/trilium-(\d{4})-(\d{2})-(\d{2})\.log/);
            if (match) {
                const mtime = new Date(
                    parseInt(match[1]),
                    parseInt(match[2]) - 1,
                    parseInt(match[3])
                );
                logFiles.push({ name, mtime });
            }
        }
        return logFiles;
    }
    /**
     * Remove an old log file. The currently open file is never deleted,
     * and failures (missing or locked file) are silently ignored.
     */
    protected override async deleteLogFile(fileName: string): Promise<void> {
        if (!this.logDir) {
            return;
        }
        // Don't delete the current file
        if (fileName === this.currentFileName) {
            return;
        }
        try {
            await this.logDir.removeEntry(fileName);
        } catch {
            // File might not exist or be locked
        }
    }
    /** Number of days to keep log files before rotation deletes them. */
    protected override getRetentionDays(): number {
        // Standalone doesn't have config system, use default
        return DEFAULT_RETENTION_DAYS;
    }
}

View File

@@ -1,8 +1,5 @@
import type { PlatformProvider } from "@triliumnext/core";
// Build-time constant injected by Vite (see `define` in vite.config.mts).
declare const __TRILIUM_INTEGRATION_TEST__: string;
/** Maps URL query parameter names to TRILIUM_ environment variable names. */
const QUERY_TO_ENV: Record<string, string> = {
"safeMode": "TRILIUM_SAFE_MODE",
@@ -23,9 +20,6 @@ export default class StandalonePlatformProvider implements PlatformProvider {
this.envMap[envKey] = params.get(queryKey) || "true";
}
}
if (__TRILIUM_INTEGRATION_TEST__) {
this.envMap["TRILIUM_INTEGRATION_TEST"] = __TRILIUM_INTEGRATION_TEST__;
}
}
crash(message: string): void {

View File

@@ -1,4 +1,4 @@
import { type BindableValue, type SAHPoolUtil, default as sqlite3InitModule } from "@sqlite.org/sqlite-wasm";
import { type BindableValue, default as sqlite3InitModule } from "@sqlite.org/sqlite-wasm";
import type { DatabaseProvider, RunResult, Statement, Transaction } from "@triliumnext/core";
// Type definitions for SQLite WASM (the library doesn't export these directly)
@@ -227,10 +227,6 @@ export default class BrowserSqlProvider implements DatabaseProvider {
// OPFS state tracking
private opfsDbPath?: string;
// SAHPool state tracking
private sahPoolUtil?: SAHPoolUtil;
private sahPoolDbName?: string;
/**
* Get the SQLite WASM module version info.
* Returns undefined if the module hasn't been initialized yet.
@@ -291,116 +287,16 @@ export default class BrowserSqlProvider implements DatabaseProvider {
return this.sqlite3 !== undefined;
}
// ==================== SAHPool VFS (preferred OPFS backend) ====================
// ==================== OPFS Support ====================
/**
* Install the OPFS SAHPool VFS. This pre-allocates a pool of OPFS
* SyncAccessHandle objects, enabling WAL mode and significantly faster
* writes compared to the legacy OPFS VFS.
*
* Must be called after `initWasm()` and before `loadFromSahPool()`.
* This is async because it acquires OPFS file handles.
*
* Unlike the legacy OPFS VFS, SAHPool does **not** require SharedArrayBuffer
* or COOP/COEP headers — it only needs OPFS itself (a Worker context with
* `navigator.storage.getDirectory`). This makes it usable in Capacitor's
* Android WebView, which doesn't support cross-origin isolation.
*
* @param options.directory - OPFS directory for the pool (default: auto-derived from VFS name)
* @param options.initialCapacity - Minimum number of file slots (default: 6)
* @throws Error if the environment doesn't support OPFS (no Worker, or no OPFS API)
*/
async installSahPool(options: { directory?: string; initialCapacity?: number } = {}): Promise<void> {
this.ensureSqlite3();
console.log("[BrowserSqlProvider] Installing OPFS SAHPool VFS...");
const startTime = performance.now();
this.sahPoolUtil = await this.sqlite3!.installOpfsSAHPoolVfs({
clearOnInit: false,
initialCapacity: options.initialCapacity ?? 6,
directory: options.directory,
});
// Ensure enough slots for DB + WAL + journal + temp files
await this.sahPoolUtil.reserveMinimumCapacity(options.initialCapacity ?? 6);
const initTime = performance.now() - startTime;
console.log(
`[BrowserSqlProvider] SAHPool VFS installed in ${initTime.toFixed(2)}ms ` +
`(capacity: ${this.sahPoolUtil.getCapacity()}, files: ${this.sahPoolUtil.getFileCount()})`
);
}
/**
* Whether the SAHPool VFS has been successfully installed.
*/
get isSahPoolInstalled(): boolean {
return this.sahPoolUtil !== undefined;
}
/**
* Access the SAHPool utility for advanced operations (import/export/migration).
*/
get sahPool(): SAHPoolUtil | undefined {
return this.sahPoolUtil;
}
/**
* Load or create a database using the SAHPool VFS.
* This is the preferred method for persistent storage — it supports WAL mode
* and is significantly faster than the legacy OPFS VFS.
*
* @param dbName - Virtual filename within the pool (e.g., "/trilium.db").
* Must start with a slash.
* @throws Error if SAHPool VFS is not installed
*/
loadFromSahPool(dbName: string): void {
this.ensureSqlite3();
if (!this.sahPoolUtil) {
throw new Error(
"SAHPool VFS not installed. Call installSahPool() first."
);
}
console.log(`[BrowserSqlProvider] Loading database from SAHPool: ${dbName}`);
const startTime = performance.now();
try {
this.db = new this.sahPoolUtil.OpfsSAHPoolDb(dbName);
this.sahPoolDbName = dbName;
this.opfsDbPath = undefined;
// SAHPool supports WAL mode — the key advantage over legacy OPFS VFS
this.db.exec("PRAGMA journal_mode = WAL");
this.db.exec("PRAGMA synchronous = NORMAL");
const loadTime = performance.now() - startTime;
console.log(`[BrowserSqlProvider] SAHPool database loaded in ${loadTime.toFixed(2)}ms (WAL mode)`);
} catch (e) {
const error = e instanceof Error ? e : new Error(String(e));
console.error(`[BrowserSqlProvider] Failed to load SAHPool database: ${error.message}`);
throw error;
}
}
/**
* Whether the currently open database is using the SAHPool VFS.
*/
get isUsingSahPool(): boolean {
return this.sahPoolDbName !== undefined;
}
// ==================== Legacy OPFS Support ====================
/**
* Check if the legacy OPFS VFS is available.
* Check if the OPFS VFS is available.
* This requires:
* - Running in a Worker context
* - Browser support for OPFS APIs
* - COOP/COEP headers sent by the server (for SharedArrayBuffer)
*
* @returns true if legacy OPFS VFS is available for use
* @returns true if OPFS VFS is available for use
*/
isOpfsAvailable(): boolean {
this.ensureSqlite3();
@@ -411,9 +307,6 @@ export default class BrowserSqlProvider implements DatabaseProvider {
/**
* Load or create a database stored in OPFS for persistent storage.
*
* **Prefer `loadFromSahPool()` over this method** — it supports WAL mode
* and is significantly faster. This method is kept for migration purposes.
* The database will persist across browser sessions.
*
* Requires COOP/COEP headers to be set by the server:
@@ -461,10 +354,9 @@ export default class BrowserSqlProvider implements DatabaseProvider {
const mode = options.createIfNotExists !== false ? 'c' : '';
this.db = new this.sqlite3!.oo1.OpfsDb(path, mode);
this.opfsDbPath = path;
this.sahPoolDbName = undefined;
// Configure the database for legacy OPFS
// Note: WAL mode is not supported by the legacy OPFS VFS
// Configure the database for OPFS
// Note: WAL mode requires exclusive locking in OPFS environment
this.db.exec("PRAGMA journal_mode = DELETE");
this.db.exec("PRAGMA synchronous = NORMAL");
@@ -478,10 +370,10 @@ export default class BrowserSqlProvider implements DatabaseProvider {
}
/**
* Check if the currently open database is stored in OPFS (legacy or SAHPool).
* Check if the currently open database is stored in OPFS.
*/
get isUsingOpfs(): boolean {
return this.opfsDbPath !== undefined || this.sahPoolDbName !== undefined;
return this.opfsDbPath !== undefined;
}
/**
@@ -489,7 +381,7 @@ export default class BrowserSqlProvider implements DatabaseProvider {
* Returns undefined if not using OPFS.
*/
get currentOpfsPath(): string | undefined {
return this.opfsDbPath ?? this.sahPoolDbName;
return this.opfsDbPath;
}
/**
@@ -513,11 +405,11 @@ export default class BrowserSqlProvider implements DatabaseProvider {
loadFromFile(_path: string, _isReadOnly: boolean): void {
// Browser environment doesn't have direct file system access.
// Use SAHPool or OPFS for persistent storage.
// Use OPFS for persistent storage.
throw new Error(
"loadFromFile is not supported in browser environment. " +
"Use loadFromMemory() for temporary databases, loadFromBuffer() to load from data, " +
"loadFromSahPool() (preferred) or loadFromOpfs() for persistent storage."
"or loadFromOpfs() for persistent storage."
);
}
@@ -534,8 +426,7 @@ export default class BrowserSqlProvider implements DatabaseProvider {
const startTime = performance.now();
this.db = new this.sqlite3!.oo1.DB(":memory:", "c");
this.opfsDbPath = undefined;
this.sahPoolDbName = undefined;
this.opfsDbPath = undefined; // Not using OPFS
this.db.exec("PRAGMA journal_mode = WAL");
const loadTime = performance.now() - startTime;
@@ -544,28 +435,18 @@ export default class BrowserSqlProvider implements DatabaseProvider {
loadFromBuffer(buffer: Uint8Array): void {
this.ensureSqlite3();
// SQLite WASM's allocFromTypedArray rejects Node's Buffer (and other
// non-Uint8Array typed arrays) with "expecting 8/16/32/64". Normalize
// to a plain Uint8Array view over the same memory so callers can pass
// anything readFileSync returns.
const view = buffer instanceof Uint8Array && buffer.constructor === Uint8Array
? buffer
: new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);
const p = this.sqlite3!.wasm.allocFromTypedArray(view);
// SQLite WASM can deserialize a database from a byte array
const p = this.sqlite3!.wasm.allocFromTypedArray(buffer);
try {
// Cached statements reference the previous DB and become invalid
// once we swap connections. Drop them so callers re-prepare.
this.clearStatementCache();
this.db = new this.sqlite3!.oo1.DB({ filename: ":memory:", flags: "c" });
this.opfsDbPath = undefined;
this.sahPoolDbName = undefined;
this.opfsDbPath = undefined; // Not using OPFS
const rc = this.sqlite3!.capi.sqlite3_deserialize(
this.db.pointer!,
"main",
p,
view.byteLength,
view.byteLength,
buffer.byteLength,
buffer.byteLength,
this.sqlite3!.capi.SQLITE_DESERIALIZE_FREEONCLOSE |
this.sqlite3!.capi.SQLITE_DESERIALIZE_RESIZEABLE
);
@@ -620,12 +501,9 @@ export default class BrowserSqlProvider implements DatabaseProvider {
// Helper function to execute within a transaction
const executeTransaction = (beginStatement: string, ...args: unknown[]): T => {
// If we're already in a transaction (either tracked via JS flag or via actual SQLite
// autocommit state), use SAVEPOINTs for nesting — this handles the case where a manual
// BEGIN was issued directly (e.g. transactionalAsync) without going through transaction().
const sqliteInTransaction = self.db?.pointer !== undefined
&& (self.sqlite3!.capi as any).sqlite3_get_autocommit(self.db!.pointer) === 0;
if (self._inTransaction || sqliteInTransaction) {
// If we're already in a transaction, use SAVEPOINTs for nesting
// This mimics better-sqlite3's behavior
if (self._inTransaction) {
const savepointName = `sp_${++savepointCounter}_${Date.now()}`;
self.db!.exec(`SAVEPOINT ${savepointName}`);
try {
@@ -682,7 +560,8 @@ export default class BrowserSqlProvider implements DatabaseProvider {
this.db!.exec(query);
}
private clearStatementCache(): void {
close(): void {
// Clean up all cached statements first
for (const statement of this.statementCache.values()) {
try {
statement.finalize();
@@ -692,19 +571,14 @@ export default class BrowserSqlProvider implements DatabaseProvider {
}
}
this.statementCache.clear();
}
close(): void {
this.clearStatementCache();
if (this.db) {
this.db.close();
this.db = undefined;
}
// Reset OPFS / SAHPool state
// Reset OPFS state
this.opfsDbPath = undefined;
this.sahPoolDbName = undefined;
}
/**
@@ -733,10 +607,7 @@ export default class BrowserSqlProvider implements DatabaseProvider {
private ensureDb(): void {
this.ensureSqlite3();
if (!this.db) {
throw new Error(
"Database not opened. Call loadFromMemory(), loadFromBuffer(), " +
"loadFromSahPool(), or loadFromOpfs() first."
);
throw new Error("Database not opened. Call loadFromMemory(), loadFromBuffer(), or loadFromOpfs() first.");
}
}
}

View File

@@ -1,18 +0,0 @@
import { type ExportFormat, type ZipExportProviderData, ZipExportProvider } from "@triliumnext/core";
import contentCss from "@triliumnext/ckeditor5/src/theme/ck-content.css?raw";
/**
 * Create the ZIP export provider for the requested format, lazily loading
 * the provider module so unused formats are never bundled into the worker.
 *
 * @throws Error when the format is neither "html" nor "markdown".
 */
export async function standaloneZipExportProviderFactory(format: ExportFormat, data: ZipExportProviderData): Promise<ZipExportProvider> {
    if (format === "html") {
        const { default: HtmlExportProvider } = await import("@triliumnext/core/src/services/export/zip/html.js");
        return new HtmlExportProvider(data, { contentCss });
    }
    if (format === "markdown") {
        const { default: MarkdownExportProvider } = await import("@triliumnext/core/src/services/export/zip/markdown.js");
        return new MarkdownExportProvider(data);
    }
    throw new Error(`Unsupported export format: '${format}'`);
}

View File

@@ -1,101 +0,0 @@
import type { FileStream, ZipArchive, ZipEntry, ZipProvider } from "@triliumnext/core/src/services/zip_provider.js";
import { strToU8, unzip, zipSync } from "fflate";
type ZipOutput = {
send?: (body: unknown) => unknown;
write?: (chunk: Uint8Array | string) => unknown;
end?: (chunk?: Uint8Array | string) => unknown;
};
/**
 * In-memory ZIP archive backed by fflate.
 *
 * Entries are staged in a map and the whole archive is produced in one
 * synchronous pass when finalize() runs, then handed to the destination
 * set via pipe() (an Express-like response or a writable-like object).
 */
class BrowserZipArchive implements ZipArchive {
    readonly #entries: Record<string, Uint8Array> = {};
    #destination: ZipOutput | null = null;

    /** Stage an entry; string content is encoded to UTF-8 bytes. */
    append(content: string | Uint8Array, options: { name: string }) {
        const bytes = typeof content === "string" ? strToU8(content) : content;
        this.#entries[options.name] = bytes;
    }

    /** Remember where the finished archive should be written. */
    pipe(destination: unknown) {
        this.#destination = destination as ZipOutput;
    }

    /**
     * Compress all staged entries and deliver the result to the
     * destination, preferring send(), then write()+end(), then end().
     *
     * @throws Error if pipe() was never called or the destination exposes
     *         none of the supported output methods.
     */
    async finalize(): Promise<void> {
        const target = this.#destination;
        if (!target) {
            throw new Error("ZIP output destination not set.");
        }
        const archive = zipSync(this.#entries, { level: 9 });
        if (typeof target.send === "function") {
            target.send(archive);
        } else if (typeof target.end === "function") {
            if (typeof target.write === "function") {
                target.write(archive);
                target.end();
            } else {
                target.end(archive);
            }
        } else {
            throw new Error("Unsupported ZIP output destination.");
        }
    }
}
/**
 * ZipProvider implementation for the browser, built on fflate.
 * Creation of real file streams is impossible without a filesystem and
 * always throws.
 */
export default class BrowserZipProvider implements ZipProvider {
    /** Build a fresh in-memory archive. */
    createZipArchive(): ZipArchive {
        return new BrowserZipArchive();
    }

    /** Browsers have no direct filesystem access, so this always throws. */
    createFileStream(_filePath: string): FileStream {
        throw new Error("File stream creation is not supported in the browser.");
    }

    /**
     * Unpack a ZIP buffer and invoke `processEntry` for each contained
     * file, sequentially. Filenames are re-decoded as UTF-8 to undo
     * fflate's CP437/Latin-1 interpretation. Rejects on unzip failure or
     * if any entry callback throws.
     */
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void> {
        return new Promise<void>((resolve, reject) => {
            unzip(buffer, async (err, files) => {
                if (err) {
                    reject(err);
                    return;
                }
                try {
                    for (const [rawName, data] of Object.entries(files)) {
                        const entry = { fileName: decodeZipFileName(rawName) };
                        await processEntry(entry, () => Promise.resolve(data));
                    }
                    resolve();
                } catch (e) {
                    reject(e);
                }
            });
        });
    }
}
// Strict decoder: throws on malformed UTF-8 so we can detect and fall back.
const utf8Decoder = new TextDecoder("utf-8", { fatal: true });

/**
 * fflate decodes ZIP entry filenames as CP437/Latin-1 unless the language
 * encoding flag (general purpose bit 11) is set, but many real-world archives
 * (e.g. those produced by macOS / Linux unzip / Python's zipfile) write UTF-8
 * filenames without setting that flag. Recover the original UTF-8 bytes from
 * fflate's per-byte string and re-decode them; if the result isn't valid
 * UTF-8 we fall back to the as-decoded name.
 */
function decodeZipFileName(name: string): string {
    // Each char of `name` holds one original byte in its low 8 bits.
    const raw = Uint8Array.from(name, (ch) => ch.charCodeAt(0) & 0xff);
    try {
        return utf8Decoder.decode(raw);
    } catch {
        return name;
    }
}

View File

@@ -66,11 +66,6 @@ export function startLocalServerWorker() {
}
export function attachServiceWorkerBridge() {
if (!("serviceWorker" in navigator) || !navigator.serviceWorker) {
console.warn("[LocalBridge] Service workers not available — skipping bridge setup");
return;
}
navigator.serviceWorker.addEventListener("message", async (event) => {
const msg = event.data;
if (!msg || msg.type !== "LOCAL_FETCH") return;

View File

@@ -48,9 +48,6 @@ console.log("[Worker] Error handlers installed, loading modules...");
// =============================================================================
import type { BrowserRouter } from './lightweight/browser_router';
// Build-time constant injected by Vite (see `define` in vite.config.mts).
declare const __TRILIUM_INTEGRATION_TEST__: string;
// =============================================================================
// MODULE STATE (populated by dynamic imports)
// =============================================================================
@@ -58,11 +55,8 @@ let BrowserSqlProvider: typeof import('./lightweight/sql_provider').default;
let WorkerMessagingProvider: typeof import('./lightweight/messaging_provider').default;
let BrowserExecutionContext: typeof import('./lightweight/cls_provider').default;
let BrowserCryptoProvider: typeof import('./lightweight/crypto_provider').default;
let BrowserZipProvider: typeof import('./lightweight/zip_provider').default;
let FetchRequestProvider: typeof import('./lightweight/request_provider').default;
let StandalonePlatformProvider: typeof import('./lightweight/platform_provider').default;
let StandaloneLogService: typeof import('./lightweight/log_provider').default;
let StandaloneBackupService: typeof import('./lightweight/backup_provider').default;
let translationProvider: typeof import('./lightweight/translation_provider').default;
let createConfiguredRouter: typeof import('./lightweight/browser_routes').createConfiguredRouter;
@@ -77,172 +71,6 @@ let initPromise: Promise<void> | null = null;
let initError: Error | null = null;
let queryString = "";
/**
 * Report whether a file is present at the OPFS root.
 *
 * Lets callers decide whether the test fixture must be seeded or whether an
 * existing DB should be reused (preserving changes made earlier in the same
 * test — e.g. options set before a page reload). Environments without OPFS
 * simply report "not found".
 */
async function opfsFileExists(fileName: string): Promise<boolean> {
    // No navigator / no OPFS API means the file cannot exist for us.
    if (typeof navigator === "undefined" || !navigator.storage?.getDirectory) {
        return false;
    }
    const opfsRoot = await navigator.storage.getDirectory();
    // getFileHandle rejects when the entry is absent — map that to `false`.
    return opfsRoot
        .getFileHandle(fileName)
        .then(() => true)
        .catch(() => false);
}
/**
 * Write a raw byte buffer to an OPFS file. Used to drop the test fixture DB
 * into OPFS as a regular file so SQLite's OPFS VFS can then open it. Requires
 * a Worker context (`createSyncAccessHandle` isn't available on the main thread
 * in some browsers).
 *
 * @param fileName entry name at the OPFS root (created if missing)
 * @param buffer bytes to persist; any previous contents are replaced
 */
async function writeOpfsFile(fileName: string, buffer: Uint8Array): Promise<void> {
    const root = await navigator.storage.getDirectory();
    const fileHandle = await root.getFileHandle(fileName, { create: true });
    // Cast through a structural type because the bundled TS DOM lib may not
    // declare createSyncAccessHandle on FileSystemFileHandle yet.
    const accessHandle = await (fileHandle as unknown as {
        createSyncAccessHandle(): Promise<{
            truncate(size: number): void;
            write(buffer: Uint8Array, opts: { at: number }): number;
            flush(): void;
            close(): void;
        }>;
    }).createSyncAccessHandle();
    try {
        // Fully replace the file: truncate, write from offset 0, then flush
        // so the bytes are durable before the handle is released.
        accessHandle.truncate(0);
        accessHandle.write(buffer, { at: 0 });
        accessHandle.flush();
    } finally {
        // Sync access handles hold an exclusive lock on the file — always close.
        accessHandle.close();
    }
}
/**
 * Read a file from the OPFS root into a Uint8Array.
 * Used during migration from legacy OPFS VFS to SAHPool.
 *
 * @param fileName entry name at the OPFS root; rejects if absent
 */
async function readOpfsFile(fileName: string): Promise<Uint8Array> {
    const root = await navigator.storage.getDirectory();
    const file = await (await root.getFileHandle(fileName)).getFile();
    const contents = await file.arrayBuffer();
    return new Uint8Array(contents);
}
/**
 * Delete a file from the OPFS root.
 * Used to clean up the legacy OPFS database after migration to SAHPool.
 *
 * NOTE: rejects if the entry does not exist — callers that tolerate a
 * missing file must catch the error themselves.
 */
async function deleteOpfsFile(fileName: string): Promise<void> {
    const root = await navigator.storage.getDirectory();
    await root.removeEntry(fileName);
}
/**
 * Verify that a buffer contains a valid SQLite database by checking the
 * 16-byte header string "SQLite format 3\0".
 *
 * @param buffer candidate database bytes
 * @param source human-readable origin of the buffer, used in the error message
 * @throws Error when the header does not match
 */
function assertSqliteMagic(buffer: Uint8Array, source: string): void {
    const magic = new TextDecoder().decode(buffer.subarray(0, 15));
    // The header is 16 bytes: the 15 ASCII characters plus a NUL terminator.
    // Also checking the NUL rejects files that merely *start* with the text
    // (and truncated buffers, where buffer[15] is undefined).
    if (magic !== "SQLite format 3" || buffer[15] !== 0) {
        throw new Error(
            `${source} is not a SQLite database ` +
            `(got ${buffer.byteLength} bytes starting with "${magic}"). ` +
            `The file is likely missing and the SPA fallback is returning index.html.`
        );
    }
}
/**
 * Migrate database from legacy OPFS VFS to SAHPool VFS.
 * Checks if a legacy `/trilium.db` file exists in the OPFS root, and if the
 * SAHPool doesn't already have it. If migration is needed, the legacy file is
 * read, imported into the pool, and then deleted.
 *
 * NOTE(review): relies on module state — `sqlProvider` and its `sahPool`
 * must already be initialized (hence the non-null assertions); confirm the
 * caller guarantees this ordering.
 *
 * @param dbName pool-style DB name with a leading slash (e.g. "/trilium.db")
 */
async function migrateFromLegacyOpfs(dbName: string): Promise<void> {
    // Legacy OPFS entries are stored without the leading slash that pool names use.
    const legacyFileName = dbName.replace(/^\//, ""); // strip leading slash
    const legacyExists = await opfsFileExists(legacyFileName);
    if (!legacyExists) {
        return; // Nothing to migrate
    }
    // Check if SAHPool already has this DB (e.g. migration already happened)
    const poolFiles = sqlProvider!.sahPool!.getFileNames();
    if (poolFiles.includes(dbName)) {
        console.log("[Worker] SAHPool already contains the database, deleting legacy OPFS file...");
        await deleteOpfsFile(legacyFileName);
        return;
    }
    console.log("[Worker] Migrating database from legacy OPFS to SAHPool VFS...");
    const startTime = performance.now();
    const buffer = await readOpfsFile(legacyFileName);
    // Refuse to import junk (e.g. an HTML page written by an SPA fallback).
    assertSqliteMagic(buffer, "Legacy OPFS database");
    await sqlProvider!.sahPool!.importDb(dbName, buffer);
    // Only delete the legacy copy once the import has succeeded.
    await deleteOpfsFile(legacyFileName);
    // Also clean up legacy journal/WAL files if they exist
    for (const suffix of ["-journal", "-wal", "-shm"]) {
        try {
            await deleteOpfsFile(legacyFileName + suffix);
        } catch {
            // Ignore — file may not exist
        }
    }
    const elapsed = performance.now() - startTime;
    console.log(`[Worker] Migration complete in ${elapsed.toFixed(2)}ms (${buffer.byteLength} bytes)`);
}
/**
 * Load the test fixture database for integration tests.
 * Seeds from the fixture if not already present, using SAHPool when available.
 *
 * NOTE(review): relies on module state — `sqlProvider` (and `sahPool` when
 * `sahPoolAvailable` is true) must already be initialized.
 *
 * @param sahPoolAvailable whether the SAHPool VFS was installed successfully
 * @param dbName pool-style DB name with a leading slash (e.g. "/trilium.db")
 */
async function loadTestDatabase(sahPoolAvailable: boolean, dbName: string): Promise<void> {
    if (sahPoolAvailable) {
        // Seed the pool only on first use; later inits in the same test reuse it.
        const poolFiles = sqlProvider!.sahPool!.getFileNames();
        if (!poolFiles.includes(dbName)) {
            console.log("[Worker] Integration test mode: seeding fixture database into SAHPool...");
            const buffer = await fetchTestFixture();
            await sqlProvider!.sahPool!.importDb(dbName, buffer);
        } else {
            console.log("[Worker] Integration test mode: reusing existing SAHPool DB from earlier in this test");
        }
        sqlProvider!.loadFromSahPool(dbName);
    } else {
        // Fallback to legacy OPFS for tests when SAHPool isn't available
        // Plain OPFS entries don't carry the leading slash that pool names do.
        const legacyFileName = dbName.replace(/^\//, "");
        if (!(await opfsFileExists(legacyFileName))) {
            console.log("[Worker] Integration test mode: seeding fixture database into OPFS...");
            const buffer = await fetchTestFixture();
            await writeOpfsFile(legacyFileName, buffer);
        } else {
            console.log("[Worker] Integration test mode: reusing existing OPFS DB from earlier in this test");
        }
        sqlProvider!.loadFromOpfs(dbName);
    }
}
/**
 * Fetch the test fixture database and validate it.
 *
 * @returns the fixture bytes, verified to carry the SQLite header
 * @throws Error on a non-2xx response or when the payload isn't SQLite
 */
async function fetchTestFixture(): Promise<Uint8Array> {
    const response = await fetch("/test-fixtures/document.db");
    if (!response.ok) {
        throw new Error(`Failed to fetch test fixture: ${response.status} ${response.statusText}`);
    }
    const raw = await response.arrayBuffer();
    const fixtureBytes = new Uint8Array(raw);
    // Guard against the SPA fallback handing us index.html instead of the DB.
    assertSqliteMagic(fixtureBytes, "Test fixture at /test-fixtures/document.db");
    return fixtureBytes;
}
/**
* Load all required modules using dynamic imports.
* This allows errors to be caught by our error handlers.
@@ -254,11 +82,8 @@ async function loadModules(): Promise<void> {
messagingModule,
clsModule,
cryptoModule,
zipModule,
requestModule,
platformModule,
logModule,
backupModule,
translationModule,
routesModule
] = await Promise.all([
@@ -266,11 +91,8 @@ async function loadModules(): Promise<void> {
import('./lightweight/messaging_provider.js'),
import('./lightweight/cls_provider.js'),
import('./lightweight/crypto_provider.js'),
import('./lightweight/zip_provider.js'),
import('./lightweight/request_provider.js'),
import('./lightweight/platform_provider.js'),
import('./lightweight/log_provider.js'),
import('./lightweight/backup_provider.js'),
import('./lightweight/translation_provider.js'),
import('./lightweight/browser_routes.js')
]);
@@ -279,11 +101,8 @@ async function loadModules(): Promise<void> {
WorkerMessagingProvider = messagingModule.default;
BrowserExecutionContext = clsModule.default;
BrowserCryptoProvider = cryptoModule.default;
BrowserZipProvider = zipModule.default;
FetchRequestProvider = requestModule.default;
StandalonePlatformProvider = platformModule.default;
StandaloneLogService = logModule.default;
StandaloneBackupService = backupModule.default;
translationProvider = translationModule.default;
createConfiguredRouter = routesModule.createConfiguredRouter;
@@ -314,44 +133,14 @@ async function initialize(): Promise<void> {
console.log("[Worker] Initializing SQLite WASM...");
await sqlProvider!.initWasm();
// Try to install the SAHPool VFS (preferred: supports WAL, much faster)
let sahPoolAvailable = false;
try {
await sqlProvider!.installSahPool();
sahPoolAvailable = true;
} catch (e) {
console.warn("[Worker] SAHPool VFS not available, will fall back to legacy OPFS or in-memory:", e);
}
// Integration test mode is baked in at build time via the
// __TRILIUM_INTEGRATION_TEST__ Vite define (derived from the
// TRILIUM_INTEGRATION_TEST env var when the bundle was built).
const integrationTestMode = __TRILIUM_INTEGRATION_TEST__;
const dbName = "/trilium.db";
if (integrationTestMode === "memory") {
// Use OPFS for the DB in integration test mode so option changes
// (and any other writes) survive page reloads within a single test.
// Playwright gives each test a fresh BrowserContext, which means a
// fresh OPFS — so on the first worker init of a test we seed from
// the fixture, and subsequent inits in the same test reuse it.
await loadTestDatabase(sahPoolAvailable, dbName);
} else if (sahPoolAvailable) {
// SAHPool available — migrate from legacy OPFS if needed, then open
await migrateFromLegacyOpfs(dbName);
console.log("[Worker] SAHPool available, loading persistent database (WAL mode)...");
sqlProvider!.loadFromSahPool(dbName);
} else if (sqlProvider!.isOpfsAvailable()) {
// Fall back to legacy OPFS VFS (no WAL, slower writes).
// This only kicks in if SAHPool installation failed for some
// reason but SharedArrayBuffer + legacy OPFS are both available.
console.warn("[Worker] SAHPool unavailable; using legacy OPFS VFS (no WAL mode).");
sqlProvider!.loadFromOpfs(dbName);
// Try to use OPFS for persistent storage
if (sqlProvider!.isOpfsAvailable()) {
console.log("[Worker] OPFS available, loading persistent database...");
sqlProvider!.loadFromOpfs("/trilium.db");
} else {
// Fall back to in-memory database (non-persistent).
// SAHPool only needs a Worker + OPFS API, so reaching this
// branch means the environment lacks OPFS entirely.
// Fall back to in-memory database (non-persistent)
console.warn("[Worker] OPFS not available, using in-memory database (data will not persist)");
console.warn("[Worker] To enable persistence, ensure COOP/COEP headers are set by the server");
sqlProvider!.loadFromMemory();
}
@@ -360,30 +149,14 @@ async function initialize(): Promise<void> {
console.log("[Worker] Loading @triliumnext/core...");
const schemaModule = await import("@triliumnext/core/src/assets/schema.sql?raw");
coreModule = await import("@triliumnext/core");
// Initialize log service with OPFS persistence
const logService = new StandaloneLogService();
await logService.initialize();
console.log("[Worker] Log service initialized with OPFS");
await coreModule.initializeCore({
executionContext: new BrowserExecutionContext(),
crypto: new BrowserCryptoProvider(),
zip: new BrowserZipProvider(),
zipExportProviderFactory: (await import("./lightweight/zip_export_provider_factory.js")).standaloneZipExportProviderFactory,
messaging: messagingProvider!,
request: new FetchRequestProvider(),
platform: new StandalonePlatformProvider(queryString),
log: logService,
backup: new StandaloneBackupService(coreModule!.options),
translations: translationProvider,
schema: schemaModule.default,
getDemoArchive: async () => {
const response = await fetch("/server-assets/db/demo.zip");
if (!response.ok) return null;
return new Uint8Array(await response.arrayBuffer());
},
image: (await import("./services/image_provider.js")).standaloneImageProvider,
dbConfig: {
provider: sqlProvider!,
isReadOnly: false,
@@ -410,26 +183,10 @@ async function initialize(): Promise<void> {
if (coreModule.sql_init.isDbInitialized()) {
console.log("[Worker] Database already initialized, loading becca...");
await coreModule.becca_loader.beccaLoaded;
// `initTranslations` runs before `initSql` inside `initializeCore`
// (options_init needs translations, creating a chicken-and-egg),
// so it always defaults to "en" on a fresh worker boot. Now that
// the DB is up we can read the real locale and, if it differs,
// switch i18next and rebuild the hidden subtree with the correct
// titles. This must happen BEFORE `startScheduler` registers its
// own `dbReady.then(checkHiddenSubtree)` so the scheduled rebuild
// sees the right language.
const dbLocale = coreModule.options.getOptionOrNull("locale");
if (dbLocale && dbLocale !== "en") {
console.log(`[Worker] Reconciling i18next locale to "${dbLocale}" from DB`);
await coreModule.i18n.changeLanguage(dbLocale);
}
} else {
console.log("[Worker] Database not initialized, skipping becca load (will be loaded during DB initialization)");
}
coreModule.scheduler.startScheduler();
console.log("[Worker] Initialization complete");
} catch (error) {
initError = error instanceof Error ? error : new Error(String(error));
@@ -469,9 +226,19 @@ async function dispatch(request: LocalRequest) {
return appRouter.dispatch(request.method, request.url, request.body, request.headers);
}
// Wait for the INIT message before initializing so that queryString
// (which may contain ?integrationTest=memory for e2e) is available.
let initReceived = false;
// Start initialization immediately when the worker loads
console.log("[Worker] Starting initialization...");
initialize().catch(err => {
console.error("[Worker] Initialization failed:", err);
// Post error to main thread
self.postMessage({
type: "WORKER_ERROR",
error: {
message: String(err?.message || err),
stack: err?.stack
}
});
});
self.onmessage = async (event) => {
const msg = event.data;
@@ -479,20 +246,6 @@ self.onmessage = async (event) => {
if (msg.type === "INIT") {
queryString = msg.queryString || "";
if (!initReceived) {
initReceived = true;
console.log("[Worker] Starting initialization...");
initialize().catch(err => {
console.error("[Worker] Initialization failed:", err);
self.postMessage({
type: "WORKER_ERROR",
error: {
message: String(err?.message || err),
stack: err?.stack
}
});
});
}
return;
}

View File

@@ -1,21 +1,8 @@
import { attachServiceWorkerBridge, startLocalServerWorker } from "./local-bridge.js";
async function waitForServiceWorkerControl(): Promise<void> {
if (!("serviceWorker" in navigator) || !navigator.serviceWorker) {
const isSecure = location.protocol === "https:" || location.hostname === "localhost" || location.hostname === "127.0.0.1";
const hints: string[] = [];
if (!isSecure) {
hints.push(`The page is served over ${location.protocol}//${location.hostname} which is not a secure context. Service workers require HTTPS (or localhost).`);
}
if (window.isSecureContext === false) {
hints.push("The browser reports this is not a secure context.");
}
throw new Error(
"Service workers are not available in this browser.\n\n" +
"Trilium standalone mode requires service workers to function.\n" +
(hints.length ? "\nPossible cause:\n- " + hints.join("\n- ") + "\n" : "") +
"\nTo fix this, access the application over HTTPS or via localhost."
);
if (!("serviceWorker" in navigator)) {
throw new Error("Service Worker not supported in this browser");
}
// If already controlling, we're good
@@ -80,7 +67,7 @@ async function bootstrap() {
<div style="padding: 40px; max-width: 600px; margin: 0 auto; font-family: system-ui, sans-serif;">
<h1 style="color: #d32f2f;">Failed to Initialize</h1>
<p>The application failed to start. Please check the browser console for details.</p>
<pre style="background: #f5f5f5; padding: 16px; border-radius: 4px; overflow: auto; white-space: pre-wrap; word-wrap: break-word;">${err instanceof Error ? err.message : String(err)}</pre>
<pre style="background: #f5f5f5; padding: 16px; border-radius: 4px; overflow: auto;">${err instanceof Error ? err.message : String(err)}</pre>
<button onclick="location.reload()" style="padding: 12px 24px; background: #1976d2; color: white; border: none; border-radius: 4px; cursor: pointer; font-size: 16px;">
Reload Page
</button>

View File

@@ -1,67 +0,0 @@
import { describe, it, expect } from "vitest";
import { data_encryption } from "@triliumnext/core";
// Note: BrowserCryptoProvider is already initialized via test_setup.ts
describe("data_encryption with BrowserCryptoProvider", () => {
    // Deterministic 16-byte data-encryption key used across the cases.
    const makeKey = (fill: number) => new Uint8Array(16).fill(fill);
    // Encrypt then decrypt a string with the same key.
    const roundTrip = (key: Uint8Array, plainText: string) =>
        data_encryption.decryptString(key, data_encryption.encrypt(key, plainText));

    it("should encrypt and decrypt ASCII text correctly", () => {
        const key = makeKey(42);
        const plainText = "Hello, World!";
        const encrypted = data_encryption.encrypt(key, plainText);
        // Ciphertext is a non-empty string (base64-ish opaque payload).
        expect(typeof encrypted).toBe("string");
        expect(encrypted.length).toBeGreaterThan(0);
        expect(data_encryption.decryptString(key, encrypted)).toBe(plainText);
    });

    it("should encrypt and decrypt UTF-8 text correctly", () => {
        const plainText = "Привет мир! 你好世界! 🎉";
        expect(roundTrip(makeKey(42), plainText)).toBe(plainText);
    });

    it("should encrypt and decrypt empty string", () => {
        expect(roundTrip(makeKey(42), "")).toBe("");
    });

    it("should encrypt and decrypt binary data", () => {
        const key = makeKey(42);
        const plainData = new Uint8Array([0, 1, 2, 255, 128, 64]);
        const decrypted = data_encryption.decrypt(key, data_encryption.encrypt(key, plainData));
        expect(decrypted).toBeInstanceOf(Uint8Array);
        expect(Array.from(decrypted as Uint8Array)).toEqual(Array.from(plainData));
    });

    it("should fail decryption with wrong key", () => {
        const encrypted = data_encryption.encrypt(makeKey(42), "Secret message");
        // decrypt returns false when digest doesn't match
        expect(data_encryption.decrypt(makeKey(43), encrypted)).toBe(false);
    });

    it("should handle large content", () => {
        const plainText = "x".repeat(100000);
        expect(roundTrip(makeKey(42), plainText)).toBe(plainText);
    });
});

View File

@@ -1,96 +0,0 @@
/**
* Standalone image provider implementation.
* Uses pure JavaScript for format detection without compression.
* Images are saved as-is without resizing.
*/
import type { ImageProvider, ImageFormat, ProcessedImage } from "@triliumnext/core";
/**
 * Detect image type from buffer using magic bytes.
 *
 * Recognizes SVG (text sniff), JPEG, PNG, GIF, WebP and BMP; anything else
 * (or any buffer shorter than 12 bytes) yields null.
 */
function getImageTypeFromBuffer(buffer: Uint8Array): ImageFormat | null {
    // 12 bytes is the longest signature we check (WebP's RIFF....WEBP).
    if (buffer.length < 12) {
        return null;
    }
    // SVG is text-based, so sniff it before the binary signatures.
    if (isSvg(buffer)) {
        return { ext: "svg", mime: "image/svg+xml" };
    }
    // Compare the buffer against a list of [offset, expected byte] pairs.
    const matches = (pairs: Array<[number, number]>) =>
        pairs.every(([at, value]) => buffer[at] === value);
    // JPEG: FF D8 FF
    if (matches([[0, 0xff], [1, 0xd8], [2, 0xff]])) {
        return { ext: "jpg", mime: "image/jpeg" };
    }
    // PNG: 89 50 4E 47 0D 0A 1A 0A
    if (matches([[0, 0x89], [1, 0x50], [2, 0x4e], [3, 0x47], [4, 0x0d], [5, 0x0a], [6, 0x1a], [7, 0x0a]])) {
        return { ext: "png", mime: "image/png" };
    }
    // GIF: "GIF"
    if (matches([[0, 0x47], [1, 0x49], [2, 0x46]])) {
        return { ext: "gif", mime: "image/gif" };
    }
    // WebP: RIFF....WEBP
    if (matches([[0, 0x52], [1, 0x49], [2, 0x46], [3, 0x46], [8, 0x57], [9, 0x45], [10, 0x42], [11, 0x50]])) {
        return { ext: "webp", mime: "image/webp" };
    }
    // BMP: "BM"
    if (matches([[0, 0x42], [1, 0x4d]])) {
        return { ext: "bmp", mime: "image/bmp" };
    }
    return null;
}
/**
 * Check if buffer contains SVG content.
 *
 * Decodes at most the first 1000 bytes as Latin-1 text and looks for an
 * `<svg` root (optionally preceded by an `<?xml` prolog).
 */
function isSvg(buffer: Uint8Array): boolean {
    // subarray clamps the end index, so short buffers are handled implicitly.
    const head = buffer.subarray(0, 1000);
    const text = String.fromCharCode(...head).trim().toLowerCase();
    return text.startsWith("<svg") || (text.startsWith("<?xml") && text.includes("<svg"));
}
/**
 * ImageProvider for the standalone (browser) build.
 * Performs magic-byte format detection only; images are never resized or
 * recompressed.
 */
export const standaloneImageProvider: ImageProvider = {
    // Pure sniffing — returns null for unrecognized formats.
    getImageType(buffer: Uint8Array): ImageFormat | null {
        return getImageTypeFromBuffer(buffer);
    },
    // `_originalName` and `_shrink` are accepted for interface parity but
    // intentionally ignored here.
    async processImage(buffer: Uint8Array, _originalName: string, _shrink: boolean): Promise<ProcessedImage> {
        // Standalone doesn't do compression - just detect format and return original
        const format = getImageTypeFromBuffer(buffer) || { ext: "dat", mime: "application/octet-stream" };
        return {
            buffer,
            format
        };
    }
};

View File

@@ -85,22 +85,12 @@ async function networkFirst(request) {
}
}
async function forwardToClientLocalServer(request, _clientId) {
// Find the main app window to handle the request
// We must route to the main app (which has the local bridge), not iframes like PDF.js viewer
// @ts-expect-error - self.clients is valid in service worker context
const all = await self.clients.matchAll({ type: "window", includeUncontrolled: true });
async function forwardToClientLocalServer(request, clientId) {
// Find a client to handle the request (prefer the initiating client if available)
let client = clientId ? await self.clients.get(clientId) : null;
// Find the main app window - it's the one NOT serving pdfjs or other embedded content
// The main app has the local bridge handler for LOCAL_FETCH messages
let client = all.find((c: { url: string }) => {
const url = new URL(c.url);
// Main app is at root or index.html, not in /pdfjs/ or other iframe paths
return !url.pathname.startsWith("/pdfjs/");
}) || null;
// If no main app window found, fall back to any available client
if (!client) {
const all = await self.clients.matchAll({ type: "window", includeUncontrolled: true });
client = all[0] || null;
}
@@ -172,13 +162,6 @@ self.addEventListener("fetch", (event) => {
// Only handle same-origin
if (url.origin !== self.location.origin) return;
// API-ish: local-first via bridge (must be checked before navigate handling,
// because export triggers a navigation to an /api/ URL)
if (isLocalFirst(url)) {
event.respondWith(forwardToClientLocalServer(event.request, event.clientId));
return;
}
// HTML files: network-first to ensure updates are reflected immediately
if (event.request.mode === "navigate" || url.pathname.endsWith(".html")) {
event.respondWith(networkFirst(event.request));
@@ -186,11 +169,17 @@ self.addEventListener("fetch", (event) => {
}
// Static assets: cache-first for performance
if (event.request.method === "GET") {
if (event.request.method === "GET" && !isLocalFirst(url)) {
event.respondWith(cacheFirst(event.request));
return;
}
// API-ish: local-first via bridge
if (isLocalFirst(url)) {
event.respondWith(forwardToClientLocalServer(event.request, event.clientId));
return;
}
// Default
event.respondWith(fetch(event.request));
});

View File

@@ -1,144 +0,0 @@
import { createRequire } from "node:module";
import { readFileSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { initializeCore, options } from "@triliumnext/core";
import schemaSql from "@triliumnext/core/src/assets/schema.sql?raw";
import HappyDomHtmlParser from "happy-dom/lib/html-parser/HTMLParser.js";
import serverEnTranslations from "../../server/src/assets/translations/en/server.json";
import { beforeAll } from "vitest";
import StandaloneBackupService from "./lightweight/backup_provider.js";
import BrowserExecutionContext from "./lightweight/cls_provider.js";
import BrowserCryptoProvider from "./lightweight/crypto_provider.js";
import StandalonePlatformProvider from "./lightweight/platform_provider.js";
import BrowserSqlProvider from "./lightweight/sql_provider.js";
import BrowserZipProvider from "./lightweight/zip_provider.js";
import { standaloneImageProvider } from "./services/image_provider.js";
// =============================================================================
// SQLite WASM compatibility shims
// =============================================================================
// The @sqlite.org/sqlite-wasm package loads its .wasm via fetch, and its
// bundled `instantiateWasm` hook overrides any user-supplied alternative.
// Two things go wrong under vitest + happy-dom:
// 1. happy-dom's `fetch()` refuses `file://` URLs.
// 2. happy-dom installs its own Response global, which Node's
// `WebAssembly.instantiateStreaming` rejects ("Received an instance of
// Response" — it wants undici's Response).
// We intercept fetch for file:// URLs ourselves and force instantiateStreaming
// to fall back to the ArrayBuffer path.
// Cache of file:// payloads so repeated wasm fetches don't re-read from disk.
const fileFetchCache = new Map<string, ArrayBuffer>();
/**
 * Read a `file://` URL into an ArrayBuffer, memoized per URL.
 * The Buffer's bytes are copied into a standalone ArrayBuffer so callers
 * never see the Buffer pool's surrounding memory.
 */
function readFileAsArrayBuffer(url: string): ArrayBuffer {
    const hit = fileFetchCache.get(url);
    if (hit) {
        return hit;
    }
    const bytes = readFileSync(fileURLToPath(url));
    const copy = bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength) as ArrayBuffer;
    fileFetchCache.set(url, copy);
    return copy;
}
// Keep a reference to the real fetch so non-file URLs still use the network stack.
const originalFetch = globalThis.fetch;
// Intercept file:// URLs (sqlite-wasm loads its .wasm this way) and serve
// them from disk, since happy-dom's fetch refuses the file: protocol.
globalThis.fetch = (async (input: RequestInfo | URL, init?: RequestInit) => {
    // Normalize the three RequestInfo shapes (string, URL, Request) to a string.
    const url = typeof input === "string"
        ? input
        : input instanceof URL
            ? input.href
            : input.url;
    if (url.startsWith("file://")) {
        const body = readFileAsArrayBuffer(url);
        // application/wasm lets WebAssembly.instantiateStreaming accept the
        // response if anything tries the streaming path.
        return new Response(body, {
            status: 200,
            headers: { "Content-Type": "application/wasm" }
        });
    }
    return originalFetch(input as RequestInfo, init);
}) as typeof fetch;
// Force instantiateStreaming onto the ArrayBuffer path: Node's native
// implementation rejects happy-dom's Response instances (it expects undici's),
// so buffer the body and use plain WebAssembly.instantiate instead.
WebAssembly.instantiateStreaming = (async (source, importObject) => {
    const response = await source;
    const bytes = await response.arrayBuffer();
    return WebAssembly.instantiate(bytes, importObject);
}) as typeof WebAssembly.instantiateStreaming;
// =============================================================================
// happy-dom HTMLParser spec compliance patch
// =============================================================================
// Per HTML5 parsing spec, a single U+000A LINE FEED immediately after a <pre>,
// <listing>, or <textarea> start tag must be ignored ("newlines at the start
// of pre blocks are ignored as an authoring convenience"). Real browsers and
// domino (which the server runtime uses via turnish) both implement this;
// happy-dom (as of 20.8.9) does not — it keeps the LF as a text node.
//
// That difference makes turnish's markdown export produce different output
// under happy-dom vs. production, breaking markdown.spec.ts > "exports jQuery
// code in table properly". Patch HTMLParser.parse to pre-process the string.
// Matches a <pre>/<listing>/<textarea> start tag ($1) immediately followed by
// one newline ($2, any of \r\n / \r / \n) — the newline the HTML5 spec drops.
const LEADING_LF_IN_PRE_RE = /(<(?:pre|listing|textarea)\b[^>]*>)(\r\n|\r|\n)/gi;
// Keep a reference to the unpatched parse so the patch can delegate to it.
// The structural casts are needed because the happy-dom type doesn't expose
// the prototype shape we're patching.
const originalHtmlParserParse = (HappyDomHtmlParser as unknown as {
    prototype: { parse(html: string, rootNode?: unknown): unknown };
}).prototype.parse;
// Monkey-patch happy-dom's HTMLParser.parse: strip the single newline after
// the opening tag before parsing, emulating spec-compliant browsers.
(HappyDomHtmlParser as unknown as {
    prototype: { parse(html: string, rootNode?: unknown): unknown };
}).prototype.parse = function (html: string, rootNode?: unknown) {
    // "$1" keeps the tag and discards the captured newline.
    const patched = typeof html === "string"
        ? html.replace(LEADING_LF_IN_PRE_RE, "$1")
        : html;
    return originalHtmlParserParse.call(this, patched, rootNode);
};
// =============================================================================
// Core initialization for standalone-flavored tests
// =============================================================================
// Mirror what apps/server/spec/setup.ts does: load the pre-seeded integration
// fixture DB into an in-memory sqlite-wasm instance, then initialize core
// against it with the standalone (browser) providers. Each vitest worker gets
// a fresh copy because tests run in forks (per the default pool).
// Resolve the pre-seeded integration fixture DB shipped with the core package.
const require = createRequire(import.meta.url);
const fixtureDb = readFileSync(
    require.resolve("@triliumnext/core/src/test/fixtures/document.db")
);
// Boot core once per vitest worker against an in-memory copy of the fixture,
// wired to the standalone (browser) providers.
beforeAll(async () => {
    const sqlProvider = new BrowserSqlProvider();
    await sqlProvider.initWasm();
    // Each worker (tests run in forks) gets its own fresh in-memory DB.
    sqlProvider.loadFromBuffer(fixtureDb);
    await initializeCore({
        executionContext: new BrowserExecutionContext(),
        crypto: new BrowserCryptoProvider(),
        zip: new BrowserZipProvider(),
        zipExportProviderFactory: (
            await import("./lightweight/zip_export_provider_factory.js")
        ).standaloneZipExportProviderFactory,
        // i18next must be wired up — keyboard_actions.ts and other modules
        // call `t()` and throw if translations are missing. Inline the
        // en/server.json resources via vite's JSON import so we don't need a
        // backend in tests.
        translations: async (i18nextInstance, locale) => {
            await i18nextInstance.init({
                lng: locale,
                fallbackLng: "en",
                ns: "server",
                defaultNS: "server",
                resources: {
                    en: { server: serverEnTranslations }
                }
            });
        },
        platform: new StandalonePlatformProvider(""),
        backup: new StandaloneBackupService(options),
        image: standaloneImageProvider,
        schema: schemaSql,
        dbConfig: {
            provider: sqlProvider,
            isReadOnly: false,
            // Tests run on a throwaway in-memory DB, so transaction
            // notifications are intentionally no-ops.
            onTransactionCommit: () => {},
            onTransactionRollback: () => {}
        }
    });
});

View File

@@ -0,0 +1,31 @@
/// <reference types="vite/client" />
/** Vite-injected environment variables exposed on `import.meta.env`. */
interface ImportMetaEnv {
    readonly VITE_APP_TITLE: string
}
interface ImportMeta {
    readonly env: ImportMetaEnv
}
/**
 * Globals the Trilium client code expects on `window`.
 * NOTE(review): field semantics are assumed from their names — confirm
 * against the code that populates `window.glob`.
 */
interface Window {
    glob: {
        assetPath: string;
        themeCssUrl?: string;
        themeUseNextAsBase?: string;
        iconPackCss: string;
        device: string;
        headingStyle: string;
        layoutOrientation: string;
        platform: string;
        isElectron: boolean;
        hasNativeTitleBar: boolean;
        hasBackgroundEffects: boolean;
        currentLocale: {
            id: string;
            rtl: boolean;
        };
        // Typed `any` upstream; tighten once the dialog type is known.
        activeDialog: any;
    };
    // Node-style alias for globalThis, expected by some bundled dependencies.
    global: typeof globalThis;
}

View File

@@ -1,9 +1,7 @@
import fs from "fs";
import { join, resolve, sep } from "path";
import prefresh from "@prefresh/vite";
import { defineConfig, type Plugin } from "vite";
import { viteStaticCopy } from "vite-plugin-static-copy";
import prefresh from '@prefresh/vite';
import { join } from 'path';
import { defineConfig } from 'vite';
import { viteStaticCopy } from 'vite-plugin-static-copy';
const clientAssets = ["assets", "stylesheets", "fonts", "translations"];
@@ -11,15 +9,15 @@ const isDev = process.env.NODE_ENV === "development";
// Watch client files and trigger reload in development
const clientWatchPlugin = () => ({
name: "client-watch",
name: 'client-watch',
configureServer(server: any) {
if (isDev) {
// Watch client source files (adjusted for new root)
server.watcher.add("../../client/src/**/*");
server.watcher.on("change", (file: string) => {
if (file.includes("../../client/src/")) {
server.watcher.add('../../client/src/**/*');
server.watcher.on('change', (file: string) => {
if (file.includes('../../client/src/')) {
server.ws.send({
type: "full-reload"
type: 'full-reload'
});
}
});
@@ -27,68 +25,16 @@ const clientWatchPlugin = () => ({
}
});
// Serve PDF.js files directly in dev mode to bypass SPA fallback
const pdfjsServePlugin = (): Plugin => ({
name: "pdfjs-serve",
configureServer(server) {
const pdfjsRoot = join(__dirname, "../../packages/pdfjs-viewer/dist");
server.middlewares.use((req, res, next) => {
if (!req.url?.startsWith("/pdfjs/")) {
return next();
}
// Map /pdfjs/web/... to dist/web/...
// Map /pdfjs/build/... to dist/build/...
// Strip query string (e.g., ?v=0.102.2) before resolving path
const urlWithoutQuery = req.url.split("?")[0];
const relativePath = urlWithoutQuery.replace(/^\/pdfjs\//, "");
const filePath = join(pdfjsRoot, relativePath);
// Security: resolve both paths to prevent prefix-collision attacks
// (e.g. pdfjsRoot="/foo/bar" matching "/foo/bar2/evil.js")
const resolvedRoot = resolve(pdfjsRoot);
const resolvedFilePath = resolve(filePath);
if (!resolvedFilePath.startsWith(resolvedRoot + sep)) {
return next();
}
if (fs.existsSync(filePath) && fs.statSync(filePath).isFile()) {
const ext = filePath.split(".").pop() || "";
const mimeTypes: Record<string, string> = {
html: "text/html",
css: "text/css",
js: "application/javascript",
mjs: "application/javascript",
wasm: "application/wasm",
png: "image/png",
svg: "image/svg+xml",
json: "application/json"
};
res.setHeader("Content-Type", mimeTypes[ext] || "application/octet-stream");
// Match isolation headers from main page for iframe compatibility
res.setHeader("Cross-Origin-Opener-Policy", "same-origin");
res.setHeader("Cross-Origin-Embedder-Policy", "require-corp");
fs.createReadStream(filePath).pipe(res);
} else {
next();
}
});
}
});
// Always copy SQLite WASM files so they're available to the module
const sqliteWasmPlugin = viteStaticCopy({
targets: [
{
src: "../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3.wasm",
dest: "assets",
rename: { stripBase: true }
dest: "assets"
},
{
src: "../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-opfs-async-proxy.js",
dest: "assets",
rename: { stripBase: true }
dest: "assets"
}
]
});
@@ -97,9 +43,8 @@ let plugins: any = [
sqliteWasmPlugin, // Always include SQLite WASM files
viteStaticCopy({
targets: clientAssets.map((asset) => ({
src: `../../client/src/${asset}/**/*`,
dest: asset,
rename: { stripBase: 3 }
src: `../../client/src/${asset}/*`,
dest: asset
})),
// Enable watching in development
...(isDev && {
@@ -111,33 +56,15 @@ let plugins: any = [
viteStaticCopy({
targets: [
{
src: "../../server/src/assets/**/*",
dest: "server-assets",
rename: { stripBase: 3 }
}
]
}),
// PDF.js viewer for PDF preview support
// stripBase: 4 removes packages/pdfjs-viewer/dist/web (or /build)
viteStaticCopy({
targets: [
{
src: "../../../packages/pdfjs-viewer/dist/web/**/*",
dest: "pdfjs/web",
rename: { stripBase: 4 }
},
{
src: "../../../packages/pdfjs-viewer/dist/build/**/*",
dest: "pdfjs/build",
rename: { stripBase: 4 }
src: "../../server/src/assets/*",
dest: "server-assets"
}
]
}),
// Watch client files for changes in development
...(isDev ? [
prefresh(),
clientWatchPlugin(),
pdfjsServePlugin()
clientWatchPlugin()
] : [])
];
@@ -145,9 +72,10 @@ if (!isDev) {
plugins = [
...plugins,
viteStaticCopy({
structured: true,
targets: [
{
src: "../../../node_modules/@excalidraw/excalidraw/dist/prod/fonts/**/*",
src: "../../../node_modules/@excalidraw/excalidraw/dist/prod/fonts/*",
dest: "",
}
]
@@ -155,27 +83,6 @@ if (!isDev) {
]
}
// Include the integration test fixture database for e2e tests
if (process.env.TRILIUM_INTEGRATION_TEST) {
plugins = [
...plugins,
viteStaticCopy({
targets: [
{
// Forward slashes are required because fast-glob (used
// internally) treats backslashes as escape characters on
// Windows. `stripBase` drops the source's directory
// structure so the file lands flat at `test-fixtures/document.db`
// rather than mirroring the `packages/trilium-core/...` path.
src: join(__dirname, "../../packages/trilium-core/src/test/fixtures/document.db").replace(/\\/g, "/"),
dest: "test-fixtures",
rename: { stripBase: true }
}
]
})
]
}
export default defineConfig(() => ({
root: join(__dirname, 'src'), // Set src as root so index.html is served from /
envDir: __dirname, // Load .env files from client-standalone directory, not src/
@@ -240,12 +147,6 @@ export default defineConfig(() => ({
"Cross-Origin-Embedder-Policy": "require-corp"
}
},
preview: {
headers: {
"Cross-Origin-Opener-Policy": "same-origin",
"Cross-Origin-Embedder-Policy": "require-corp"
}
},
optimizeDeps: {
exclude: ['@sqlite.org/sqlite-wasm', '@triliumnext/core']
},
@@ -279,30 +180,9 @@ export default defineConfig(() => ({
}
},
test: {
environment: "happy-dom",
setupFiles: [join(__dirname, "src/test_setup.ts")],
dir: join(__dirname),
include: [
"src/**/*.{test,spec}.{ts,tsx}",
"../../packages/trilium-core/src/**/*.{test,spec}.{ts,tsx}"
],
server: {
deps: {
inline: ["@sqlite.org/sqlite-wasm"]
}
},
alias: {
// The package's `node.mjs` entry references a non-existent
// `sqlite3-node.mjs`. Force the browser-style entry which works
// under Node + happy-dom too.
"@sqlite.org/sqlite-wasm": join(
__dirname,
"../../node_modules/@sqlite.org/sqlite-wasm/index.mjs"
)
}
environment: "happy-dom"
},
define: {
"process.env.IS_PREACT": JSON.stringify("true"),
__TRILIUM_INTEGRATION_TEST__: JSON.stringify(process.env.TRILIUM_INTEGRATION_TEST ?? ""),
}
}));

View File

@@ -1,6 +1,6 @@
{
"name": "@triliumnext/client",
"version": "0.102.2",
"version": "0.102.1",
"description": "JQuery-based client for TriliumNext, used for both web and desktop (via Electron)",
"private": true,
"license": "AGPL-3.0-only",
@@ -27,49 +27,50 @@
"@maplibre/maplibre-gl-leaflet": "0.1.3",
"@mermaid-js/layout-elk": "0.2.1",
"@mind-elixir/node-menu": "5.0.1",
"@preact/signals": "2.9.0",
"@popperjs/core": "2.11.8",
"@preact/signals": "2.8.2",
"@triliumnext/ckeditor5": "workspace:*",
"@triliumnext/codemirror": "workspace:*",
"@triliumnext/commons": "workspace:*",
"@triliumnext/highlightjs": "workspace:*",
"@triliumnext/share-theme": "workspace:*",
"@triliumnext/split.js": "workspace:*",
"@univerjs/preset-sheets-conditional-formatting": "0.20.1",
"@univerjs/preset-sheets-core": "0.20.1",
"@univerjs/preset-sheets-data-validation": "0.20.1",
"@univerjs/preset-sheets-filter": "0.20.1",
"@univerjs/preset-sheets-find-replace": "0.20.1",
"@univerjs/preset-sheets-note": "0.20.1",
"@univerjs/preset-sheets-sort": "0.20.1",
"@univerjs/presets": "0.20.1",
"@zumer/snapdom": "2.8.0",
"@univerjs/preset-sheets-conditional-formatting": "0.18.0",
"@univerjs/preset-sheets-core": "0.18.0",
"@univerjs/preset-sheets-data-validation": "0.18.0",
"@univerjs/preset-sheets-filter": "0.18.0",
"@univerjs/preset-sheets-find-replace": "0.18.0",
"@univerjs/preset-sheets-note": "0.18.0",
"@univerjs/preset-sheets-sort": "0.18.0",
"@univerjs/presets": "0.18.0",
"@zumer/snapdom": "2.6.0",
"autocomplete.js": "0.38.1",
"bootstrap": "5.3.8",
"boxicons": "2.1.4",
"clsx": "2.1.1",
"color": "5.0.3",
"debounce": "3.0.0",
"dompurify": "3.4.0",
"draggabilly": "3.0.0",
"force-graph": "1.51.4",
"htmldiff-js": "1.0.5",
"i18next": "26.0.4",
"i18next-http-backend": "3.0.4",
"force-graph": "1.51.2",
"globals": "17.4.0",
"i18next": "25.10.10",
"i18next-http-backend": "3.0.2",
"jquery": "4.0.0",
"jquery.fancytree": "2.38.5",
"jsplumb": "2.15.6",
"katex": "0.16.45",
"katex": "0.16.43",
"leaflet": "1.9.4",
"leaflet-gpx": "2.2.0",
"mark.js": "8.11.1",
"marked": "18.0.0",
"mermaid": "11.14.0",
"mind-elixir": "5.10.0",
"marked": "17.0.5",
"mermaid": "11.13.0",
"mind-elixir": "5.9.3",
"normalize.css": "8.0.1",
"panzoom": "9.4.4",
"preact": "10.29.1",
"react-i18next": "17.0.3",
"preact": "10.29.0",
"react-i18next": "16.6.6",
"react-window": "2.2.7",
"reveal.js": "6.0.1",
"reveal.js": "6.0.0",
"rrule": "2.8.1",
"svg-pan-zoom": "3.6.2",
"tabulator-tables": "6.4.0",
@@ -77,7 +78,7 @@
},
"devDependencies": {
"@ckeditor/ckeditor5-inspector": "5.0.0",
"@prefresh/vite": "3.0.0",
"@prefresh/vite": "2.4.12",
"@types/bootstrap": "5.2.10",
"@types/jquery": "4.0.0",
"@types/leaflet": "1.9.21",
@@ -85,9 +86,9 @@
"@types/mark.js": "8.11.12",
"@types/tabulator-tables": "6.3.1",
"copy-webpack-plugin": "14.0.0",
"happy-dom": "20.9.0",
"happy-dom": "20.8.8",
"lightningcss": "1.32.0",
"script-loader": "0.7.2",
"vite-plugin-static-copy": "4.0.1"
"vite-plugin-static-copy": "3.4.0"
}
}

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" x="0px" y="0px" viewBox="0 0 100 100" enable-background="new 0 0 100 100" xml:space="preserve"><path d="M63.966,45.043c0.008-0.009,0.021-0.021,0.027-0.029c0.938-1.156-0.823-13.453-5.063-20.125 c-1.389-2.186-2.239-3.423-3.219-4.719c-3.907-5.166-6-6.125-6-6.125S35.732,24.78,36.149,44.315 c-1.754,0.065-11.218,7.528-14.826,14.388c-1.206,2.291-1.856,3.645-2.493,5.141c-2.539,5.957-2.33,8.25-2.33,8.25 s16.271,6.79,33.014-3.294c0.007,0.021,0.013,0.046,0.02,0.063c0.537,1.389,12.08,5.979,19.976,5.621 c2.587-0.116,4.084-0.238,5.696-0.444c6.424-0.818,8.298-2.157,8.298-2.157S81.144,54.396,63.966,45.043z M50.787,65.343 c1.059-1.183,4.648-5.853,0.995-11.315c-0.253-0.377-0.496-0.236-0.496-0.236s0.063,10.822-5.162,12.359 c-5.225,1.537-13.886,4.4-20.427,0.455C25,66.186,26.924,53.606,38.544,47.229c0.546,1.599,2.836,6.854,9.292,6.409 c0.453-0.031,0.453-0.313,0.453-0.313s-9.422-5.328-8.156-10.625s3.089-14.236,9.766-17.948c0.714-0.397,10.746,7.593,10.417,20.94 c-1.606-0.319-7.377-1.004-10.226,4.864c-0.198,0.409,0.046,0.549,0.046,0.549s9.31-5.521,13.275-1.789 c3.965,3.733,10.813,9.763,10.71,17.4C74.111,67.533,62.197,72.258,50.787,65.343z M35.613,35.145c0,0-0.991,3.241-0.603,7.524 l-13.393-7.524C21.618,35.145,27.838,30.931,35.613,35.145z M21.193,36.03l13.344,7.612c-3.872,1.872-6.142,4.388-6.142,4.388 C20.78,43.531,21.193,36.03,21.193,36.03z M72.287,49.064c0,0-2.321-2.471-6.23-4.263l13.187-7.881 C79.243,36.92,79.808,44.413,72.287,49.064z M78.687,36.113l-13.237,7.794c0.3-4.291-0.754-7.511-0.754-7.511 C72.383,32.025,78.687,36.113,78.687,36.113z M42.076,73.778c0,0,3.309-0.737,6.845-3.185l0.056,15.361 C48.977,85.955,42.244,82.621,42.076,73.778z M49.956,85.888L50,70.526c3.539,2.445,6.846,3.181,6.846,3.181 C56.686,82.551,49.956,85.888,49.956,85.888z"></path></svg>

Before

Width:  |  Height:  |  Size: 1.8 KiB

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg enable-background="new 0 0 256 256" version="1.1" viewBox="0 0 256 256" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<title>Trilium Notes</title>
<g>
<path d="m202.9 112.7c-22.5 16.1-54.5 12.8-74.9 6.3l14.8-11.8 14.1-11.3 49.1-39.3-51.2 35.9-14.3 10-14.9 10.5c0.7-21.2 7-49.9 28.6-65.4 1.8-1.3 3.9-2.6 6.1-3.8 2.7-1.5 5.7-2.9 8.8-4.1 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.9 65.9-2.4 2.8-4.9 5.4-7.4 7.8-3.4 3.5-6.8 6.4-10.1 8.8z" fill="#ab60e3"/>
<path d="m213.1 104c-22.2 12.6-51.4 9.3-70.3 3.2l14.1-11.3 49.1-39.3-51.2 35.9-14.3 10c0.5-18.1 4.9-42.1 19.7-58.6 2.7-1.5 5.7-2.9 8.8-4.1 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.9 65.9-2.3 2.8-4.8 5.4-7.2 7.8z" fill="#8038b8"/>
<path d="m220.5 96.2c-21.1 8.6-46.6 5.3-63.7-0.2l49.2-39.4-51.2 35.9c0.3-15.8 3.5-36.6 14.3-52.8 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.8 66z" fill="#560a8f"/>
<path d="m106.7 179c-5.8-21 5.2-43.8 15.5-57.2l4.8 14.2 4.5 13.4 15.9 47-12.8-47.6-3.6-13.2-3.7-13.9c15.5 6.2 35.1 18.6 40.7 38.8 0.5 1.7 0.9 3.6 1.2 5.5 0.4 2.4 0.6 5 0.7 7.7 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8-1.4-2.6-2.7-5.1-3.8-7.6-1.6-3.5-2.9-6.8-3.8-10z" fill="#bb9dd2"/>
<path d="m110.4 188.9c-3.4-19.8 6.9-40.5 16.6-52.9l4.5 13.4 15.9 47-12.8-47.6-3.6-13.2c13.3 5.2 29.9 15 38.1 30.4 0.4 2.4 0.6 5 0.7 7.7 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8-1.4-2.6-2.7-5.2-3.8-7.7z" fill="#9a6cbc"/>
<path d="m114.2 196.5c-0.7-18 8.6-35.9 17.3-47.1l15.9 47-12.8-47.6c11.6 4.4 26.1 12.4 35.2 24.8 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8z" fill="#783ba5"/>
<path d="m86.3 59.1c21.7 10.9 32.4 36.6 35.8 54.9l-15.2-6.6-14.5-6.3-50.6-22 48.8 24.9 13.6 6.9 14.3 7.3c-16.6 7.9-41.3 14.5-62.1 4.1-1.8-0.9-3.6-1.9-5.4-3.2-2.3-1.5-4.5-3.2-6.8-5.1-19.9-16.4-40.3-46.4-42.7-61.5 12.4-6.5 41.5-5.8 64.8-0.3 3.2 0.8 6.2 1.6 9.1 2.5 4 1.3 7.6 2.8 10.9 4.4z" fill="#ab60e3"/>
<path d="m75.4 54.8c18.9 12 28.4 35.6 31.6 52.6l-14.5-6.3-50.6-22 48.7 24.9 13.6 6.9c-14.1 6.8-34.5 13-53.3 8.2-2.3-1.5-4.5-3.2-6.8-5.1-19.8-16.4-40.2-46.4-42.6-61.5 12.4-6.5 41.5-5.8 64.8-0.3 3.1 0.8 6.2 1.6 9.1 2.6z" fill="#8038b8"/>
<path d="m66.3 52.2c15.3 12.8 23.3 33.6 26.1 48.9l-50.6-22 48.8 24.9c-12.2 6-29.6 11.8-46.5 10-19.8-16.4-40.2-46.4-42.6-61.5 12.4-6.5 41.5-5.8 64.8-0.3z" fill="#6f2796"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 2.3 KiB

View File

@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg enable-background="new 0 0 256 256" version="1.1" viewBox="0 0 256 256" xml:space="preserve" xmlns="http://www.w3.org/2000/svg">
<title>Trilium Notes</title>
<style type="text/css">
.st0{fill:#95C980;}
.st1{fill:#72B755;}
.st2{fill:#4FA52B;}
.st3{fill:#EE8C89;}
.st4{fill:#E96562;}
.st5{fill:#E33F3B;}
.st6{fill:#EFB075;}
.st7{fill:#E99547;}
.st8{fill:#E47B19;}
</style>
<g>
<path class="st0" d="m202.9 112.7c-22.5 16.1-54.5 12.8-74.9 6.3l14.8-11.8 14.1-11.3 49.1-39.3-51.2 35.9-14.3 10-14.9 10.5c0.7-21.2 7-49.9 28.6-65.4 1.8-1.3 3.9-2.6 6.1-3.8 2.7-1.5 5.7-2.9 8.8-4.1 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.9 65.9-2.4 2.8-4.9 5.4-7.4 7.8-3.4 3.5-6.8 6.4-10.1 8.8z"/>
<path class="st1" d="m213.1 104c-22.2 12.6-51.4 9.3-70.3 3.2l14.1-11.3 49.1-39.3-51.2 35.9-14.3 10c0.5-18.1 4.9-42.1 19.7-58.6 2.7-1.5 5.7-2.9 8.8-4.1 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.9 65.9-2.3 2.8-4.8 5.4-7.2 7.8z"/>
<path class="st2" d="m220.5 96.2c-21.1 8.6-46.6 5.3-63.7-0.2l49.2-39.4-51.2 35.9c0.3-15.8 3.5-36.6 14.3-52.8 27.1-11.1 68.5-15.3 85.2-9.5 0.1 16.2-15.9 45.4-33.8 66z"/>
<path class="st3" d="m106.7 179c-5.8-21 5.2-43.8 15.5-57.2l4.8 14.2 4.5 13.4 15.9 47-12.8-47.6-3.6-13.2-3.7-13.9c15.5 6.2 35.1 18.6 40.7 38.8 0.5 1.7 0.9 3.6 1.2 5.5 0.4 2.4 0.6 5 0.7 7.7 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8-1.4-2.6-2.7-5.1-3.8-7.6-1.6-3.5-2.9-6.8-3.8-10z"/>
<path class="st4" d="m110.4 188.9c-3.4-19.8 6.9-40.5 16.6-52.9l4.5 13.4 15.9 47-12.8-47.6-3.6-13.2c13.3 5.2 29.9 15 38.1 30.4 0.4 2.4 0.6 5 0.7 7.7 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8-1.4-2.6-2.7-5.2-3.8-7.7z"/>
<path class="st5" d="m114.2 196.5c-0.7-18 8.6-35.9 17.3-47.1l15.9 47-12.8-47.6c11.6 4.4 26.1 12.4 35.2 24.8 0.9 23.1-7.1 54.9-15.9 65.7-12-4.3-29.3-24-39.7-42.8z"/>
<path class="st6" d="m86.3 59.1c21.7 10.9 32.4 36.6 35.8 54.9l-15.2-6.6-14.5-6.3-50.6-22 48.8 24.9 13.6 6.9 14.3 7.3c-16.6 7.9-41.3 14.5-62.1 4.1-1.8-0.9-3.6-1.9-5.4-3.2-2.3-1.5-4.5-3.2-6.8-5.1-19.9-16.4-40.3-46.4-42.7-61.5 12.4-6.5 41.5-5.8 64.8-0.3 3.2 0.8 6.2 1.6 9.1 2.5 4 1.3 7.6 2.8 10.9 4.4z"/>
<path class="st7" d="m75.4 54.8c18.9 12 28.4 35.6 31.6 52.6l-14.5-6.3-50.6-22 48.7 24.9 13.6 6.9c-14.1 6.8-34.5 13-53.3 8.2-2.3-1.5-4.5-3.2-6.8-5.1-19.8-16.4-40.2-46.4-42.6-61.5 12.4-6.5 41.5-5.8 64.8-0.3 3.1 0.8 6.2 1.6 9.1 2.6z"/>
<path class="st8" d="m66.3 52.2c15.3 12.8 23.3 33.6 26.1 48.9l-50.6-22 48.8 24.9c-12.2 6-29.6 11.8-46.5 10-19.8-16.4-40.2-46.4-42.6-61.5 12.4-6.5 41.5-5.8 64.8-0.3z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 2.5 KiB

View File

@@ -1,11 +1,10 @@
import type { CKTextEditor } from "@triliumnext/ckeditor5";
import type CodeMirror from "@triliumnext/codemirror";
import { type LOCALE_IDS, SqlExecuteResponse } from "@triliumnext/commons";
import { SqlExecuteResponse } from "@triliumnext/commons";
import type { NativeImage, TouchBar } from "electron";
import { ColumnComponent } from "tabulator-tables";
import type { Attribute } from "../services/attribute_parser.js";
import bundleService from "../services/bundle.js";
import froca from "../services/froca.js";
import { initLocale, t } from "../services/i18n.js";
import keyboardActionsService from "../services/keyboard_actions.js";
@@ -24,7 +23,6 @@ import { IncludeNoteOpts } from "../widgets/dialogs/include_note.jsx";
import type { InfoProps } from "../widgets/dialogs/info.jsx";
import type { MarkdownImportOpts } from "../widgets/dialogs/markdown_import.jsx";
import { ChooseNoteTypeCallback } from "../widgets/dialogs/note_type_chooser.jsx";
import type { PrintPreviewData } from "../widgets/dialogs/print_preview.jsx";
import type { PromptDialogOptions } from "../widgets/dialogs/prompt.js";
import type NoteTreeWidget from "../widgets/note_tree.js";
import Component from "./component.js";
@@ -281,7 +279,6 @@ export type CommandMappings = {
backInNoteHistory: CommandData;
forwardInNoteHistory: CommandData;
forceSaveRevision: CommandData;
saveNamedRevision: CommandData;
scrollToActiveNote: CommandData;
quickSearch: CommandData;
collapseTree: CommandData;
@@ -305,7 +302,6 @@ export type CommandMappings = {
ninthTab: CommandData;
lastTab: CommandData;
showNoteSource: CommandData;
showNoteOCRText: CommandData;
showSQLConsole: CommandData;
showBackendLog: CommandData;
showCheatsheet: CommandData;
@@ -332,7 +328,6 @@ export type CommandMappings = {
toggleRightPane: CommandData;
printActiveNote: CommandData;
exportAsPdf: CommandData;
showPrintPreview: PrintPreviewData;
openNoteExternally: CommandData;
openNoteCustom: CommandData;
openNoteOnServer: CommandData;
@@ -513,7 +508,7 @@ type EventMappings = {
contentSafeMarginChanged: {
top: number;
noteContext: NoteContext;
};
}
};
export type EventListener<T extends EventNames> = {
@@ -567,7 +562,7 @@ export class AppContext extends Component {
*/
async earlyInit() {
await options.initializedPromise;
await initLocale((options.get("locale") || "en") as LOCALE_IDS);
await initLocale();
}
setLayout(layout: Layout) {
@@ -582,6 +577,7 @@ export class AppContext extends Component {
this.tabManager.loadTabs();
const bundleService = (await import("../services/bundle.js")).default;
setTimeout(() => bundleService.executeStartupBundles(), 2000);
}

View File

@@ -1,7 +1,6 @@
import { CreateChildrenResponse, SqlExecuteResponse } from "@triliumnext/commons";
import bundleService from "../services/bundle.js";
import dialog from "../services/dialog.js";
import dateNoteService from "../services/date_notes.js";
import froca from "../services/froca.js";
import { t } from "../services/i18n.js";
@@ -217,21 +216,4 @@ export default class Entrypoints extends Component {
toastService.showMessage(t("entrypoints.note-revision-created"));
}
async saveNamedRevisionCommand() {
const noteId = appContext.tabManager.getActiveContextNoteId();
if (!noteId) return;
const name = await dialog.prompt({
title: t("entrypoints.save-named-revision-title"),
message: t("entrypoints.save-named-revision-message"),
defaultValue: ""
});
// null means the user cancelled
if (name === null) return;
await server.post(`notes/${noteId}/revision`, { description: name || undefined });
toastService.showMessage(t("entrypoints.note-revision-created"));
}
}

View File

@@ -25,15 +25,6 @@ export type GetTextEditorCallback = (editor: CKTextEditor) => void;
export type SaveState = "saved" | "saving" | "unsaved" | "error";
const READ_ONLY_CAPABLE_TYPES: string[] = [
"text",
"code",
"mermaid",
"canvas",
"mindMap",
"spreadsheet"
];
export interface NoteContextDataMap {
toc: HeadingContext;
pdfPages: {
@@ -312,12 +303,8 @@ class NoteContext extends Component implements EventListener<"entitiesReloaded">
return false;
}
if (!this.note) {
return false;
}
// Note types that support a read-only state (via the #readOnly label, source view, or auto-readonly).
if (!READ_ONLY_CAPABLE_TYPES.includes(this.note.type)) {
// "readOnly" is a state valid only for text/code notes
if (!this.note || (this.note.type !== "text" && this.note.type !== "code")) {
return false;
}
@@ -333,11 +320,6 @@ class NoteContext extends Component implements EventListener<"entitiesReloaded">
return true;
}
// Auto read-only based on content size is only configurable for text/code.
if (this.note.type !== "text" && this.note.type !== "code") {
return false;
}
// Store the initial decision about read-only status in the viewScope
// This will be "remembered" until the viewScope is refreshed
if (!this.viewScope) {

View File

@@ -148,19 +148,6 @@ export default class RootCommandExecutor extends Component {
}
}
async showNoteOCRTextCommand() {
const notePath = appContext.tabManager.getActiveContextNotePath();
if (notePath) {
await appContext.tabManager.openTabWithNoteWithHoisting(notePath, {
activate: true,
viewScope: {
viewMode: "ocr"
}
});
}
}
async showAttachmentsCommand() {
const notePath = appContext.tabManager.getActiveContextNotePath();

View File

@@ -54,7 +54,7 @@ function initOnElectron() {
const currentWindow = electronRemote.getCurrentWindow();
const style = window.getComputedStyle(document.body);
initDarkOrLightMode();
initDarkOrLightMode(style);
initTransparencyEffects(style, currentWindow);
initFullScreenDetection(currentWindow);
@@ -119,11 +119,11 @@ function initTransparencyEffects(style: CSSStyleDeclaration, currentWindow: Elec
*
* @param style the root CSS element to read variables from.
*/
function initDarkOrLightMode() {
function initDarkOrLightMode(style: CSSStyleDeclaration) {
let themeSource: typeof nativeTheme.themeSource = "system";
const themeStyle = window.glob.getThemeStyle();
if (themeStyle !== "auto") {
const themeStyle = style.getPropertyValue("--theme-style");
if (style.getPropertyValue("--theme-style-auto") !== "true" && (themeStyle === "light" || themeStyle === "dark")) {
themeSource = themeStyle;
}

View File

@@ -66,15 +66,7 @@ class FAttribute {
}
get isAutoLink() {
if (this.type === "relation") {
return ["internalLink", "imageLink", "relationMapLink", "includeNoteLink"].includes(this.name);
}
if (this.type === "label") {
return this.name === "internalBookmark";
}
return false;
return this.type === "relation" && ["internalLink", "imageLink", "relationMapLink", "includeNoteLink"].includes(this.name);
}
get toString() {

View File

@@ -1,6 +1,5 @@
import { getNoteIcon } from "@triliumnext/commons";
import bundleService from "../services/bundle.js";
import cssClassManager from "../services/css_class_manager.js";
import type { Froca } from "../services/froca-interface.js";
import noteAttributeCache from "../services/note_attribute_cache.js";
@@ -19,7 +18,7 @@ const RELATION = "relation";
* end user. Those types should be used only for checking against, they are
* not for direct use.
*/
export type NoteType = "file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code" | "mindMap" | "spreadsheet" | "llmChat";
export type NoteType = "file" | "image" | "search" | "noteMap" | "launcher" | "doc" | "contentWidget" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "book" | "webView" | "code" | "mindMap" | "spreadsheet";
export interface NotePathRecord {
isArchived: boolean;
@@ -236,16 +235,6 @@ export default class FNote {
return this.hasAttribute("label", "archived");
}
/**
* Returns true if the note's metadata (title, icon) should not be editable.
* This applies to system notes like options, help, and launch bar configuration.
*/
get isMetadataReadOnly() {
return utils.isLaunchBarConfig(this.noteId)
|| this.noteId.startsWith("_help_")
|| this.noteId.startsWith("_options");
}
getChildNoteIds() {
return this.children;
}
@@ -1025,6 +1014,7 @@ export default class FNote {
const env = this.getScriptEnv();
if (env === "frontend") {
const bundleService = (await import("../services/bundle.js")).default;
return await bundleService.getAndExecuteBundle(this.noteId);
} else if (env === "backend") {
await server.post(`script/run/${this.noteId}`);
@@ -1069,10 +1059,6 @@ export default class FNote {
return this.mime === "text/x-sqlite;schema=trilium";
}
isMarkdown() {
return this.type === "code" && (this.mime === "text/markdown" || this.mime === "text/x-markdown" || this.mime === "text/x-gfm");
}
isTriliumScript() {
return this.mime.startsWith("application/javascript");
}

View File

@@ -1,5 +1,3 @@
import { getThemeStyle } from "./services/theme";
async function bootstrap() {
showSplash();
await setupGlob();
@@ -41,7 +39,6 @@ async function setupGlob() {
activeDialog: null,
device: json.device || getDevice()
};
window.glob.getThemeStyle = getThemeStyle;
}
function getDevice() {
@@ -79,65 +76,31 @@ async function loadBootstrapCss() {
}
}
type StylesheetRef = {
href: string;
media?: string;
};
function getConfiguredThemeStylesheets(stylesheetsPath: string, theme: string, customThemeCssUrl?: string) {
if (theme === "auto") {
return [{ href: `${stylesheetsPath}/theme-dark.css`, media: "(prefers-color-scheme: dark)" }];
}
if (theme === "dark") {
return [{ href: `${stylesheetsPath}/theme-dark.css` }];
}
if (theme === "next") {
return [
{ href: `${stylesheetsPath}/theme-next-light.css` },
{ href: `${stylesheetsPath}/theme-next-dark.css`, media: "(prefers-color-scheme: dark)" }
];
}
if (theme === "next-light") {
return [{ href: `${stylesheetsPath}/theme-next-light.css` }];
}
if (theme === "next-dark") {
return [{ href: `${stylesheetsPath}/theme-next-dark.css` }];
}
if (theme !== "light" && customThemeCssUrl) {
return [{ href: customThemeCssUrl }];
}
return [];
}
function loadStylesheets() {
const { device, assetPath, theme, themeBase, customThemeCssUrl } = window.glob;
const stylesheetsPath = `${assetPath}/stylesheets`;
const { device, assetPath, themeCssUrl, themeUseNextAsBase } = window.glob;
const cssToLoad: StylesheetRef[] = [];
const cssToLoad: string[] = [];
if (device !== "print") {
cssToLoad.push({ href: `${stylesheetsPath}/ckeditor-theme.css` });
cssToLoad.push({ href: `api/fonts` });
cssToLoad.push({ href: `${stylesheetsPath}/theme-light.css` });
cssToLoad.push(...getConfiguredThemeStylesheets(stylesheetsPath, theme, customThemeCssUrl));
if (themeBase) {
cssToLoad.push(...getConfiguredThemeStylesheets(stylesheetsPath, themeBase));
cssToLoad.push(`${assetPath}/stylesheets/ckeditor-theme.css`);
cssToLoad.push(`api/fonts`);
cssToLoad.push(`${assetPath}/stylesheets/theme-light.css`);
if (themeCssUrl) {
cssToLoad.push(themeCssUrl);
}
cssToLoad.push({ href: `${stylesheetsPath}/style.css` });
if (themeUseNextAsBase === "next") {
cssToLoad.push(`${assetPath}/stylesheets/theme-next.css`);
} else if (themeUseNextAsBase === "next-dark") {
cssToLoad.push(`${assetPath}/stylesheets/theme-next-dark.css`);
} else if (themeUseNextAsBase === "next-light") {
cssToLoad.push(`${assetPath}/stylesheets/theme-next-light.css`);
}
cssToLoad.push(`${assetPath}/stylesheets/style.css`);
}
for (const { href, media } of cssToLoad) {
for (const href of cssToLoad) {
const linkEl = document.createElement("link");
linkEl.href = href;
linkEl.rel = "stylesheet";
if (media) {
linkEl.media = media;
}
document.head.appendChild(linkEl);
}
}

View File

@@ -91,7 +91,6 @@ export default class DesktopLayout {
.optChild(launcherPaneIsHorizontal, <LeftPaneToggle isHorizontalLayout={true} />)
.child(<TabHistoryNavigationButtons />)
.child(new TabRowWidget().class("full-width"))
.optChild(glob.isStandalone, <StandaloneWarningBar />)
.optChild(isNewLayout, <RightPaneToggle />)
.optChild(customTitleBarButtons, <TitleBarButtons />)
.css("height", "40px")
@@ -119,7 +118,6 @@ export default class DesktopLayout {
.class("tab-row-container")
.child(<TabHistoryNavigationButtons />)
.child(new TabRowWidget())
.optChild(glob.isStandalone, <StandaloneWarningBar />)
.optChild(isNewLayout, <RightPaneToggle />)
.optChild(customTitleBarButtons, <TitleBarButtons />)
.css("height", "40px")
@@ -189,6 +187,7 @@ export default class DesktopLayout {
)
)
.optChild(launcherPaneIsHorizontal && isNewLayout, <StatusBar />)
.optChild(glob.isStandalone, <StandaloneWarningBar />)
.child(<CloseZenModeButton />)
// Desktop-specific dialogs.

View File

@@ -24,7 +24,6 @@ import InfoDialog from "../widgets/dialogs/info.js";
import IncorrectCpuArchDialog from "../widgets/dialogs/incorrect_cpu_arch.js";
import CallToActionDialog from "../widgets/dialogs/call_to_action.jsx";
import PopupEditorDialog from "../widgets/dialogs/PopupEditor.jsx";
import PrintPreviewDialog from "../widgets/dialogs/print_preview.jsx";
import ToastContainer from "../widgets/Toast.jsx";
export function applyModals(rootContainer: RootContainer) {
@@ -52,7 +51,6 @@ export function applyModals(rootContainer: RootContainer) {
.child(<PromptDialog />)
.child(<IncorrectCpuArchDialog />)
.child(<PopupEditorDialog />)
.child(<PrintPreviewDialog />)
.child(<CallToActionDialog />)
.child(<ToastContainer />);
}

View File

@@ -27,7 +27,7 @@ kbd {
max-width: 350px;
}
/* #region Tree (non-standalone mobile: Fancytree-based) */
/* #region Tree */
.tree-wrapper {
max-height: 100%;
margin-top: 0px;

View File

@@ -1,7 +1,6 @@
import "./mobile_layout.css";
import type AppContext from "../components/app_context.js";
import { isMobileApp } from "../services/utils";
import GlobalMenuWidget from "../widgets/buttons/global_menu.js";
import CloseZenModeButton from "../widgets/close_zen_button.js";
import NoteList from "../widgets/collections/NoteList.jsx";
@@ -16,7 +15,6 @@ import NoteBadges from "../widgets/layout/NoteBadges.jsx";
import NoteTitleActions from "../widgets/layout/NoteTitleActions.jsx";
import StandaloneWarningBar from "../widgets/layout/StandaloneWarningBar";
import MobileDetailMenu from "../widgets/mobile_widgets/mobile_detail_menu.js";
import MobileNoteNavigator from "../widgets/mobile_widgets/MobileNoteNavigator.jsx";
import ScreenContainer from "../widgets/mobile_widgets/screen_container.js";
import SidebarContainer from "../widgets/mobile_widgets/sidebar_container.js";
import ToggleSidebarButton from "../widgets/mobile_widgets/toggle_sidebar_button.jsx";
@@ -49,13 +47,7 @@ export default class MobileLayout {
.css("padding-inline-start", "0")
.css("padding-inline-end", "0")
.css("contain", "content")
.child(
new FlexContainer("column")
.filling()
.id("mobile-sidebar-wrapper")
.child(new QuickSearchWidget())
.child(glob.isStandalone ? <MobileNoteNavigator /> : new NoteTreeWidget())
)
.child(new FlexContainer("column").filling().id("mobile-sidebar-wrapper").child(new QuickSearchWidget()).child(new NoteTreeWidget()))
)
.child(
new ScreenContainer("detail", "row")
@@ -64,6 +56,7 @@ export default class MobileLayout {
.child(
new SplitNoteContainer(() =>
new NoteWrapperWidget()
.optChild(glob.isStandalone, <StandaloneWarningBar />)
.child(
new FlexContainer("row")
.class("title-row note-split-title")
@@ -73,8 +66,6 @@ export default class MobileLayout {
.child(<NoteIconWidget />)
.child(<NoteTitleWidget />)
.child(<NoteBadges />)
.optChild(isMobileApp(), <StandaloneWarningBar variant="mobile" />)
.optChild(glob.isStandalone && !isMobileApp(), <StandaloneWarningBar />)
.child(<MobileDetailMenu />)
)
.child(

View File

@@ -39,7 +39,6 @@ export interface MenuCommandItem<T> {
title: string;
command?: T;
type?: string;
mime?: string;
/**
* The icon to display in the menu item.
*

View File

@@ -1,14 +1,12 @@
import type { BrowserWindow } from "electron";
import type { CommandNames } from "../components/app_context.js";
import appContext from "../components/app_context.js";
import zoomService from "../components/zoom.js";
import * as clipboardExt from "../services/clipboard_ext.js";
import { t } from "../services/i18n.js";
import options from "../services/options.js";
import server from "../services/server.js";
import utils from "../services/utils.js";
import options from "../services/options.js";
import zoomService from "../components/zoom.js";
import contextMenu, { type MenuItem } from "./context_menu.js";
import { t } from "../services/i18n.js";
import server from "../services/server.js";
import * as clipboardExt from "../services/clipboard_ext.js";
import type { BrowserWindow } from "electron";
import type { CommandNames, AppContext } from "../components/app_context.js";
function setupContextMenu() {
const electron = utils.dynamicRequire("electron");
@@ -17,6 +15,8 @@ function setupContextMenu() {
// FIXME: Remove typecast once Electron is properly integrated.
const { webContents } = remote.getCurrentWindow() as BrowserWindow;
let appContext: AppContext;
webContents.on("context-menu", (event, params) => {
const { editFlags } = params;
const hasText = params.selectionText.trim().length > 0;
@@ -38,7 +38,7 @@ function setupContextMenu() {
items.push({
title: t("electron_context_menu.add-term-to-dictionary", { term: params.misspelledWord }),
uiIcon: "bx bx-plus",
handler: () => electron.ipcRenderer.send("add-word-to-dictionary", params.misspelledWord)
handler: () => webContents.session.addWordToSpellCheckerDictionary(params.misspelledWord)
});
items.push({ kind: "separator" });
@@ -141,7 +141,7 @@ function setupContextMenu() {
}
// Replace the placeholder with the real search keyword.
const searchUrl = searchEngineUrl.replace("{keyword}", encodeURIComponent(params.selectionText));
let searchUrl = searchEngineUrl.replace("{keyword}", encodeURIComponent(params.selectionText));
items.push({ kind: "separator" });
@@ -155,6 +155,10 @@ function setupContextMenu() {
title: t("electron_context_menu.search_in_trilium", { term: shortenedSelection }),
uiIcon: "bx bx-search",
handler: async () => {
if (!appContext) {
appContext = (await import("../components/app_context.js")).default;
}
await appContext.triggerCommand("searchNotes", {
searchString: params.selectionText
});

View File

@@ -1,101 +0,0 @@
import type { ToggleInParentResponse } from "@triliumnext/commons";
import type FNote from "../entities/fnote.js";
import branchService from "../services/branches.js";
import { t } from "../services/i18n.js";
import server from "../services/server.js";
import toast from "../services/toast.js";
import contextMenu, { type ContextMenuEvent, type MenuItem } from "./context_menu.js";
const VISIBLE_LAUNCHER_PARENTS = ["_lbVisibleLaunchers", "_lbMobileVisibleLaunchers"];
function getVisibleLauncherBranch(launcherNote: FNote) {
return launcherNote.getParentBranches().find((b) => VISIBLE_LAUNCHER_PARENTS.includes(b.parentNoteId));
}
function getBookmarkBranch(launcherNote: FNote) {
return launcherNote.getParentBranches().find((b) => b.parentNoteId === "_lbBookmarks");
}
async function removeFromLaunchBar(launcherNote: FNote) {
const bookmarkBranch = getBookmarkBranch(launcherNote);
if (bookmarkBranch) {
// Individual bookmarks are represented via a branch under `_lbBookmarks`; removing them
// from the launch bar is the same as unbookmarking the note.
const resp = await server.put<ToggleInParentResponse>(
`notes/${launcherNote.noteId}/toggle-in-parent/_lbBookmarks/false`
);
if (!resp.success && resp.message) {
toast.showError(resp.message);
}
return;
}
const launcherBranch = getVisibleLauncherBranch(launcherNote);
if (!launcherBranch) return;
const isMobileLauncher = launcherBranch.parentNoteId === "_lbMobileVisibleLaunchers";
// Branch IDs in the hidden subtree follow the `${parentNoteId}_${noteId}` convention,
// so the branch linking `_lb(Mobile)?Root` to the "available" launchers root is predictable.
const targetBranchId = isMobileLauncher
? "_lbMobileRoot__lbMobileAvailableLaunchers"
: "_lbRoot__lbAvailableLaunchers";
await branchService.moveToParentNote([launcherBranch.branchId], targetBranchId);
}
export function canRemoveFromLaunchBar(launcherNote: FNote | null | undefined) {
if (!launcherNote) return false;
return !!(getVisibleLauncherBranch(launcherNote) || getBookmarkBranch(launcherNote));
}
export interface ShowLauncherContextMenuOptions<T extends string> {
/** Menu items specific to this launcher (e.g. "Open in new tab" for note-based launchers). They appear above the "Remove from launch bar" item. */
extraItems?: MenuItem<T>[];
/** Handler for the {@link extraItems}. The "Remove from launch bar" item is handled internally and will not be forwarded. */
onCommand?: (command: T | undefined) => void;
}
const REMOVE_COMMAND = "__removeFromLaunchBar__";
/**
* Displays the launch bar icon context menu. When the launcher can be removed (i.e. it is a direct
* child of the visible launchers root or of `_lbBookmarks`), a "Remove from launch bar" entry is
* appended. Extra items can be supplied to preserve launcher-specific actions (e.g. "Open in new tab").
*/
export async function showLauncherContextMenu<T extends string>(
launcherNote: FNote | null | undefined,
e: ContextMenuEvent,
options: ShowLauncherContextMenuOptions<T> = {}
) {
e.preventDefault();
const items = [...(options.extraItems ?? [])] as MenuItem<string>[];
if (canRemoveFromLaunchBar(launcherNote)) {
if (items.length > 0) {
items.push({ kind: "separator" });
}
items.push({
title: t("launcher_button_context_menu.remove_from_launch_bar"),
command: REMOVE_COMMAND,
uiIcon: "bx bx-x-circle"
});
}
if (items.length === 0) return;
contextMenu.show<string>({
x: e.pageX ?? 0,
y: e.pageY ?? 0,
items,
selectMenuItemHandler: ({ command }) => {
if (command === REMOVE_COMMAND) {
if (launcherNote) {
void removeFromLaunchBar(launcherNote);
}
return;
}
options.onCommand?.(command as T | undefined);
}
});
}

View File

@@ -288,7 +288,7 @@ export default class TreeContextMenu implements SelectMenuItemEventListener<Tree
return items.filter((row) => row !== null) as MenuItem<TreeCommandNames>[];
}
async selectMenuItemHandler({ command, type, mime, templateNoteId }: MenuCommandItem<TreeCommandNames>) {
async selectMenuItemHandler({ command, type, templateNoteId }: MenuCommandItem<TreeCommandNames>) {
const notePath = treeService.getNotePath(this.node);
if (utils.isMobile()) {
@@ -305,7 +305,6 @@ export default class TreeContextMenu implements SelectMenuItemEventListener<Tree
target: "after",
targetBranchId: this.node.data.branchId,
type,
mime,
isProtected,
templateNoteId
});
@@ -314,7 +313,6 @@ export default class TreeContextMenu implements SelectMenuItemEventListener<Tree
noteCreateService.createNote(parentNotePath, {
type,
mime,
isProtected: this.node.data.isProtected,
templateNoteId
});

View File

@@ -2,17 +2,18 @@
:root {
--print-font-size: 11pt;
--ck-content-color-image-caption-background: transparent !important;
}
html,
body {
--print-font-family: var(--detail-font-family, sans-serif);
width: 100%;
height: 100%;
color: black;
font-family: var(--print-font-family);
}
@page {
margin: 2cm;
}
.note-list-widget.full-height,
@@ -25,12 +26,6 @@ body {
}
body[data-note-type="text"] .ck-content {
--ck-content-font-family: var(--print-font-family);
--ck-content-font-size: var(--print-font-size);
--ck-content-font-color: black;
--ck-content-line-height: 1.5;
--ck-content-color-image-caption-background: transparent;
font-size: var(--print-font-size);
text-align: justify;
}
@@ -159,4 +154,4 @@ span[style] {
.page-break::after {
display: none !important;
}
/* #endregion */
/* #endregion */

View File

@@ -4,7 +4,6 @@ import { useCallback, useLayoutEffect, useRef } from "preact/hooks";
import FNote from "./entities/fnote";
import content_renderer from "./services/content_renderer";
import { applyInlineMermaid } from "./services/content_renderer_text";
import froca from "./services/froca";
import { dynamicRequire, isElectron } from "./services/utils";
import { CustomNoteList, useNoteViewType } from "./widgets/collections/NoteList";
@@ -31,21 +30,7 @@ async function main() {
if (!noteId) return;
await import("./print.css");
// Browser printing relies on @page margins since there's no programmatic control.
// Electron uses printToPDF() margins instead, so we only inject this for the browser path.
if (!isElectron()) {
const style = document.createElement("style");
style.textContent = "@page { margin: 2cm; }";
document.head.appendChild(style);
}
// Load the user's font preferences so that --detail-font-family is available.
const fontLink = document.createElement("link");
fontLink.rel = "stylesheet";
fontLink.href = "api/fonts";
document.head.appendChild(fontLink);
const froca = (await import("./services/froca")).default;
const note = await froca.getNote(noteId);
const bodyWrapper = document.createElement("div");
@@ -120,9 +105,6 @@ function SingleNoteRenderer({ note, onReady }: RendererProps) {
// Check custom CSS.
await loadCustomCss(note);
// Wait for all fonts (including those from custom CSS) to finish loading.
await document.fonts.ready;
}
load().then(() => requestAnimationFrame(() => onReady({
@@ -148,7 +130,6 @@ function CollectionRenderer({ note, onReady, onProgressChanged }: RendererProps)
media="print"
onReady={async (data: PrintReport) => {
await loadCustomCss(note);
await document.fonts.ready;
onReady(data);
}}
onProgressChanged={onProgressChanged}

View File

@@ -7,10 +7,6 @@ async function renderAttribute(attribute: FAttribute, renderIsInheritable: boole
const isInheritable = renderIsInheritable && attribute.isInheritable ? `(inheritable)` : "";
const $attr = $("<span>");
if (attribute.isAutoLink) {
return $attr;
}
if (attribute.type === "label") {
$attr.append(document.createTextNode(`#${attribute.name}${isInheritable}`));
@@ -19,6 +15,9 @@ async function renderAttribute(attribute: FAttribute, renderIsInheritable: boole
$attr.append(document.createTextNode(formatValue(attribute.value)));
}
} else if (attribute.type === "relation") {
if (attribute.isAutoLink) {
return $attr;
}
// when the relation has just been created, then it might not have a value
if (attribute.value) {

View File

@@ -6,8 +6,10 @@ import froca from "./froca";
import server from "./server.js";
// Spy on server methods to track calls
server.put = vi.fn(async () => ({})) as typeof server.put;
server.remove = vi.fn(async () => ({})) as typeof server.remove;
// @ts-expect-error the generic typing is causing issues here
server.put = vi.fn(async <T> (url: string, data?: T) => ({} as T));
// @ts-expect-error the generic typing is causing issues here
server.remove = vi.fn(async <T> (url: string) => ({} as T));
describe("Set boolean with inheritance", () => {
beforeEach(() => {

View File

@@ -120,7 +120,7 @@ async function deleteNotes(branchIdsToDelete: string[], forceDeleteAllClones = f
if (moveToParent) {
try {
await activateParentNotePath(branchIdsToDelete);
await activateParentNotePath();
} catch (e) {
console.error(e);
}
@@ -152,28 +152,13 @@ async function deleteNotes(branchIdsToDelete: string[], forceDeleteAllClones = f
return true;
}
async function activateParentNotePath(branchIdsToDelete: string[]) {
async function activateParentNotePath() {
// this is not perfect, maybe we should find the next/previous sibling, but that's more complex
const activeContext = appContext.tabManager.getActiveContext();
const activeNotePath = activeContext?.notePathArray ?? [];
const parentNotePathArr = activeContext?.notePathArray.slice(0, -1);
// Find the deleted branch that appears earliest in the active note's path
let earliestIndex = activeNotePath.length;
for (const branchId of branchIdsToDelete) {
const branch = froca.getBranch(branchId);
if (branch) {
const index = activeNotePath.indexOf(branch.noteId);
if (index !== -1 && index < earliestIndex) {
earliestIndex = index;
}
}
}
// Navigate to the parent of the highest deleted ancestor
if (earliestIndex < activeNotePath.length) {
const parentPath = activeNotePath.slice(0, earliestIndex);
if (parentPath.length > 0) {
await activeContext?.setNote(parentPath.join("/"));
}
if (parentNotePathArr && parentNotePathArr.length > 0) {
activeContext?.setNote(parentNotePathArr.join("/"));
}
}

View File

@@ -1,7 +1,5 @@
import { ScriptParams } from "@triliumnext/commons";
import { h, VNode } from "preact";
import FNote from "../entities/fnote.js";
import BasicWidget, { ReactWrappedWidget } from "../widgets/basic_widget.js";
import RightPanelWidget from "../widgets/right_panel_widget.js";
import type { Entity } from "./frontend_script_api.js";
@@ -28,7 +26,7 @@ type WithNoteId<T> = T & {
};
export type Widget = WithNoteId<(LegacyWidget | WidgetDefinitionWithType)>;
async function getAndExecuteBundle(noteId: string, originEntity: FNote | null = null, script: string | null = null, params: ScriptParams | null = null) {
async function getAndExecuteBundle(noteId: string, originEntity = null, script = null, params = null) {
const bundle = await server.post<Bundle>(`script/bundle/${noteId}`, {
script,
params

View File

@@ -1,6 +1,3 @@
import { t } from "./i18n.js";
import toast from "./toast.js";
export function copyText(text: string) {
if (!text) {
return;
@@ -9,26 +6,29 @@ export function copyText(text: string) {
if (navigator.clipboard) {
navigator.clipboard.writeText(text);
return true;
}
// Fallback method: https://stackoverflow.com/a/72239825
const textArea = document.createElement("textarea");
textArea.value = text;
try {
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
return document.execCommand('copy');
} finally {
document.body.removeChild(textArea);
} else {
// Fallback method: https://stackoverflow.com/a/72239825
const textArea = document.createElement("textarea");
textArea.value = text;
try {
document.body.appendChild(textArea);
textArea.focus();
textArea.select();
return document.execCommand('copy');
} finally {
document.body.removeChild(textArea);
}
}
} catch (e) {
console.warn(e);
return false;
}
}
export function copyTextWithToast(text: string) {
export async function copyTextWithToast(text: string) {
const t = (await import("./i18n.js")).t;
const toast = (await import("./toast.js")).default;
if (copyText(text)) {
toast.showMessage(t("clipboard.copy_success"));
} else {

View File

@@ -1,7 +1,6 @@
import "./content_renderer.css";
import { normalizeMimeTypeForCKEditor, renderToHtml, type TextRepresentationResponse } from "@triliumnext/commons";
import DOMPurify from "dompurify";
import { normalizeMimeTypeForCKEditor } from "@triliumnext/commons";
import { h, render } from "preact";
import WheelZoom from 'vanilla-js-wheel-zoom';
@@ -9,14 +8,13 @@ import FAttachment from "../entities/fattachment.js";
import FNote from "../entities/fnote.js";
import imageContextMenuService from "../menus/image_context_menu.js";
import { t } from "../services/i18n.js";
import renderText, { postProcessRichContent, renderChildrenList } from "./content_renderer_text.js";
import renderText from "./content_renderer_text.js";
import renderDoc from "./doc_renderer.js";
import { loadElkIfNeeded, postprocessMermaidSvg } from "./mermaid.js";
import openService from "./open.js";
import protectedSessionService from "./protected_session.js";
import protectedSessionHolder from "./protected_session_holder.js";
import renderService from "./render.js";
import server from "./server.js";
import { applySingleBlockSyntaxHighlight } from "./syntax_highlight.js";
import utils, { getErrorMessage } from "./utils.js";
@@ -34,7 +32,6 @@ export interface RenderOptions {
includeArchivedNotes?: boolean;
/** Set of note IDs that have already been seen during rendering to prevent infinite recursion. */
seenNoteIds?: Set<string>;
showTextRepresentation?: boolean;
}
const CODE_MIME_TYPES = new Set(["application/json"]);
@@ -55,14 +52,12 @@ export async function getRenderedContent(this: {} | { ctx: string }, entity: FNo
if (type === "text" || type === "book") {
await renderText(entity, $renderedContent, options);
} else if (type === "markdown") {
await renderMarkdown(entity, $renderedContent, options);
} else if (type === "code") {
await renderCode(entity, $renderedContent);
} else if (["image", "canvas", "mindMap", "spreadsheet"].includes(type)) {
await renderImage(entity, $renderedContent, options);
renderImage(entity, $renderedContent, options);
} else if (!options.tooltip && ["file", "pdf", "audio", "video"].includes(type)) {
await renderFile(entity, type, $renderedContent, options);
await renderFile(entity, type, $renderedContent);
} else if (type === "mermaid") {
await renderMermaid(entity, $renderedContent);
} else if (type === "render" && entity instanceof FNote) {
@@ -122,31 +117,6 @@ export async function getRenderedContent(this: {} | { ctx: string }, entity: FNo
};
}
/**
* Renders a markdown note by converting its source to CKEditor-compatible HTML,
* then running the same post-render pipeline as text notes (included notes,
* math, reference links, Mermaid, code highlight) so the preview matches what
* the user sees in the Markdown note type's preview pane.
*/
async function renderMarkdown(note: FNote | FAttachment, $renderedContent: JQuery<HTMLElement>, options: RenderOptions) {
const blob = await note.getBlob();
const source = blob?.content ?? "";
if (!source.trim()) {
if (note instanceof FNote && !options.noChildrenList) {
await renderChildrenList($renderedContent, note, options.includeArchivedNotes ?? false);
}
return;
}
const html = renderToHtml(source, note.title, {
sanitize: (dirty) => DOMPurify.sanitize(dirty),
wikiLink: { formatHref: (id) => `#root/${id}` }
});
$renderedContent.append($('<div class="ck-content">').html(html));
await postProcessRichContent(note, $renderedContent, options);
}
/**
* Renders a code note, by displaying its content and applying syntax highlighting based on the selected MIME type.
*/
@@ -168,7 +138,7 @@ async function renderCode(note: FNote | FAttachment, $renderedContent: JQuery<HT
await applySingleBlockSyntaxHighlight($codeBlock, normalizeMimeTypeForCKEditor(note.mime));
}
async function renderImage(entity: FNote | FAttachment, $renderedContent: JQuery<HTMLElement>, options: RenderOptions = {}) {
function renderImage(entity: FNote | FAttachment, $renderedContent: JQuery<HTMLElement>, options: RenderOptions = {}) {
const encodedTitle = encodeURIComponent(entity.title);
let url;
@@ -176,14 +146,13 @@ async function renderImage(entity: FNote | FAttachment, $renderedContent: JQuery
if (entity instanceof FNote) {
url = `api/images/${entity.noteId}/${encodedTitle}?${Math.random()}`;
} else if (entity instanceof FAttachment) {
url = `api/attachments/${entity.attachmentId}/image/${encodedTitle}?${entity.utcDateModified}`;
url = `api/attachments/${entity.attachmentId}/image/${encodedTitle}?${entity.utcDateModified}">`;
}
$renderedContent // styles needed for the zoom to work well
.css("display", "flex")
.css("align-items", "center")
.css("justify-content", "center")
.css("flex-direction", "column"); // OCR text is displayed below the image.
.css("justify-content", "center");
const $img = $("<img>")
.attr("src", url || "")
@@ -209,35 +178,9 @@ async function renderImage(entity: FNote | FAttachment, $renderedContent: JQuery
}
imageContextMenuService.setupContextMenu($img);
if (entity instanceof FNote && options.showTextRepresentation) {
await addOCRTextIfAvailable(entity, $renderedContent);
}
}
async function addOCRTextIfAvailable(note: FNote, $content: JQuery<HTMLElement>) {
try {
const data = await server.get<TextRepresentationResponse>(`ocr/notes/${note.noteId}/text`);
if (data.success && data.hasOcr && data.text) {
const $ocrSection = $(`
<div class="ocr-text-section">
<div class="ocr-header">
<span class="bx bx-text"></span> ${t("ocr.extracted_text")}
</div>
<div class="ocr-content"></div>
</div>
`);
$ocrSection.find('.ocr-content').text(data.text);
$content.append($ocrSection);
}
} catch (error) {
// Silently fail if OCR API is not available
console.debug('Failed to fetch OCR text:', error);
}
}
async function renderFile(entity: FNote | FAttachment, type: string, $renderedContent: JQuery<HTMLElement>, options: RenderOptions = {}) {
async function renderFile(entity: FNote | FAttachment, type: string, $renderedContent: JQuery<HTMLElement>) {
let entityType, entityId;
if (entity instanceof FNote) {
@@ -277,10 +220,6 @@ async function renderFile(entity: FNote | FAttachment, type: string, $renderedCo
$content.append($videoPreview);
}
if (entity instanceof FNote && options.showTextRepresentation) {
await addOCRTextIfAvailable(entity, $content);
}
if (entityType === "notes" && "noteId" in entity) {
// TODO: we should make this available also for attachments, but there's a problem with "Open externally" support
// in attachment list
@@ -358,8 +297,6 @@ function getRenderingType(entity: FNote | FAttachment) {
if (type === "file" && mime === "application/pdf") {
type = "pdf";
} else if (type === "code" && entity instanceof FNote && entity.isMarkdown()) {
type = "markdown";
} else if ((type === "file" || type === "viewConfig") && mime && CODE_MIME_TYPES.has(mime) && !isIconPack) {
type = "code";
} else if (type === "file" && mime && mime.startsWith("audio/")) {

View File

@@ -15,47 +15,37 @@ export default async function renderText(note: FNote | FAttachment, $renderedCon
if (blob && !isHtmlEmpty(blob.content)) {
$renderedContent.append($('<div class="ck-content">').html(blob.content));
await postProcessRichContent(note, $renderedContent, options);
const seenNoteIds = options.seenNoteIds ?? new Set<string>();
seenNoteIds.add("noteId" in note ? note.noteId : note.attachmentId);
if (!options.noIncludedNotes) {
await renderIncludedNotes($renderedContent[0], seenNoteIds);
} else {
$renderedContent.find("section.include-note").remove();
}
if ($renderedContent.find("span.math-tex").length > 0) {
renderMathInElement($renderedContent[0], { trust: true });
}
const getNoteIdFromLink = (el: HTMLElement) => tree.getNoteIdFromUrl($(el).attr("href") || "");
const referenceLinks = $renderedContent.find<HTMLAnchorElement>("a.reference-link");
const noteIdsToPrefetch = referenceLinks.map((i, el) => getNoteIdFromLink(el));
await froca.getNotes(noteIdsToPrefetch);
for (const el of referenceLinks) {
const innerSpan = document.createElement("span");
await link.loadReferenceLinkTitle($(innerSpan), el.href);
el.replaceChildren(innerSpan);
}
await rewriteMermaidDiagramsInContainer($renderedContent[0] as HTMLDivElement);
await formatCodeBlocks($renderedContent);
} else if (note instanceof FNote && !options.noChildrenList) {
await renderChildrenList($renderedContent, note, options.includeArchivedNotes ?? false);
}
}
/**
* Apply the post-render passes that make CKEditor-compatible HTML fully
* interactive: expand `<section class="include-note">`, render inline math and
* Mermaid diagrams, rewrite reference-link titles, and highlight code blocks.
* Assumes the caller has already appended the HTML inside a `.ck-content` child
* of `$renderedContent`.
*/
export async function postProcessRichContent(note: FNote | FAttachment, $renderedContent: JQuery<HTMLElement>, options: RenderOptions = {}) {
const seenNoteIds = options.seenNoteIds ?? new Set<string>();
seenNoteIds.add("noteId" in note ? note.noteId : note.attachmentId);
if (!options.noIncludedNotes) {
await renderIncludedNotes($renderedContent[0], seenNoteIds);
} else {
$renderedContent.find("section.include-note").remove();
}
if ($renderedContent.find("span.math-tex").length > 0) {
renderMathInElement($renderedContent[0], { trust: true });
}
const getNoteIdFromLink = (el: HTMLElement) => tree.getNoteIdFromUrl($(el).attr("href") || "");
const referenceLinks = $renderedContent.find<HTMLAnchorElement>("a.reference-link");
const noteIdsToPrefetch = referenceLinks.map((i, el) => getNoteIdFromLink(el));
await froca.getNotes(noteIdsToPrefetch);
await Promise.all(referenceLinks.toArray().map(async (el) => {
const innerSpan = document.createElement("span");
await link.loadReferenceLinkTitle($(innerSpan), el.getAttribute("href"));
el.replaceChildren(innerSpan);
}));
await rewriteMermaidDiagramsInContainer($renderedContent[0] as HTMLDivElement);
await formatCodeBlocks($renderedContent);
}
async function renderIncludedNotes(contentEl: HTMLElement, seenNoteIds: Set<string>) {
// TODO: Consider duplicating with server's share/content_renderer.ts.
const includeNoteEls = contentEl.querySelectorAll("section.include-note");
@@ -111,107 +101,19 @@ export async function rewriteMermaidDiagramsInContainer(container: HTMLDivElemen
}
}
/**
* Per-container cache of rendered mermaid SVG keyed by diagram source text.
* Populated after each successful render; reused on subsequent renders to
* avoid flicker when the preview HTML is regenerated (e.g. live markdown
* editing). Entries for diagrams no longer present in the container are
* evicted on each run so the cache can't grow unbounded.
*/
const mermaidSvgCache = new WeakMap<HTMLElement, Map<string, string>>();
/**
* Per-container, ordered snapshot of the most recently rendered SVGs. Used as
* a positional placeholder so edits to a diagram's source keep the previous
* SVG visible while the new one renders offscreen.
*/
const mermaidLastRenderedByPosition = new WeakMap<HTMLElement, string[]>();
export async function applyInlineMermaid(container: HTMLDivElement) {
const nodes = Array.from(container.querySelectorAll<HTMLElement>("div.mermaid-diagram"));
if (!nodes.length) {
mermaidLastRenderedByPosition.delete(container);
return;
}
let cache = mermaidSvgCache.get(container);
if (!cache) {
cache = new Map();
mermaidSvgCache.set(container, cache);
}
const lastRendered = mermaidLastRenderedByPosition.get(container) ?? [];
// Decide per node: exact cache hit → paint final SVG; source changed →
// paint the previous SVG (by position) as a placeholder and queue an
// offscreen re-render. This way the user keeps seeing the old diagram
// until mermaid has finished producing the new one.
const pending: Array<{ visible: HTMLElement; source: string }> = [];
const seenSources = new Set<string>();
for (const [ index, node ] of nodes.entries()) {
const source = (node.textContent ?? "").trim();
seenSources.add(source);
const cached = cache.get(source);
if (cached) {
node.innerHTML = cached;
node.setAttribute("data-processed", "true");
continue;
}
pending.push({ visible: node, source });
const placeholder = lastRendered[index];
if (placeholder) {
node.innerHTML = placeholder;
}
}
// Evict cache entries whose source is no longer present.
for (const key of [ ...cache.keys() ]) {
if (!seenSources.has(key)) cache.delete(key);
}
if (!pending.length) {
mermaidLastRenderedByPosition.set(container, nodes.map((n) => n.innerHTML));
return;
}
// Initialize mermaid
const mermaid = (await import("mermaid")).default;
mermaid.initialize(getMermaidConfig());
// Render clones offscreen so the visible nodes keep showing the placeholder
// until the new SVG is ready. Keeps mermaid away from our placeholder SVG
// (which would otherwise confuse its text-based parser).
const offscreen = document.createElement("div");
offscreen.style.cssText = "position:absolute;left:-9999px;top:-9999px;width:0;height:0;overflow:hidden;visibility:hidden;";
document.body.appendChild(offscreen);
const pairs = pending.map(({ visible, source }) => {
const clone = document.createElement("div");
clone.className = "mermaid-diagram";
clone.textContent = source;
offscreen.appendChild(clone);
return { visible, clone, source };
});
const nodes = Array.from(container.querySelectorAll<HTMLElement>("div.mermaid-diagram"));
try {
await mermaid.run({ nodes: pairs.map((p) => p.clone) });
for (const { visible, clone, source } of pairs) {
if (clone.getAttribute("data-processed") !== "true") continue;
const svg = clone.innerHTML;
visible.innerHTML = svg;
visible.setAttribute("data-processed", "true");
cache.set(source, svg);
}
await mermaid.run({ nodes });
} catch (e) {
console.error(e);
} finally {
offscreen.remove();
console.log(e);
}
mermaidLastRenderedByPosition.set(container, nodes.map((n) => n.innerHTML));
}
export async function renderChildrenList($renderedContent: JQuery<HTMLElement>, note: FNote, includeArchivedNotes: boolean) {
async function renderChildrenList($renderedContent: JQuery<HTMLElement>, note: FNote, includeArchivedNotes: boolean) {
let childNoteIds = note.getChildNoteIds();
if (!childNoteIds.length) {

View File

@@ -84,55 +84,6 @@ async function createSearchNote(opts = {}) {
return await froca.getNote(note.noteId);
}
async function createLlmChat() {
const note = await server.post<FNoteRow>("special-notes/llm-chat");
await ws.waitForMaxKnownEntityChangeId();
return await froca.getNote(note.noteId);
}
/**
* Gets the most recently modified LLM chat.
* Returns null if no chat exists.
*/
async function getMostRecentLlmChat() {
const note = await server.get<FNoteRow | null>("special-notes/most-recent-llm-chat");
if (!note) {
return null;
}
await ws.waitForMaxKnownEntityChangeId();
return await froca.getNote(note.noteId);
}
/**
* Gets the most recent LLM chat, or creates a new one if none exists.
* Used by sidebar chat for persistent conversations across page refreshes.
*/
async function getOrCreateLlmChat() {
const note = await server.get<FNoteRow>("special-notes/get-or-create-llm-chat");
await ws.waitForMaxKnownEntityChangeId();
return await froca.getNote(note.noteId);
}
export interface RecentLlmChat {
noteId: string;
title: string;
dateModified: string;
}
/**
* Gets a list of recent LLM chats for the history popup.
*/
async function getRecentLlmChats(limit: number = 10): Promise<RecentLlmChat[]> {
return await server.get<RecentLlmChat[]>(`special-notes/recent-llm-chats?limit=${limit}`);
}
export default {
getInboxNote,
getTodayNote,
@@ -143,9 +94,5 @@ export default {
getMonthNote,
getYearNote,
createSqlConsole,
createSearchNote,
createLlmChat,
getMostRecentLlmChat,
getOrCreateLlmChat,
getRecentLlmChats
createSearchNote
};

View File

@@ -1,11 +1,9 @@
import { Modal } from "bootstrap";
import appContext from "../components/app_context.js";
import type { ConfirmDialogOptions, ConfirmDialogResult, ConfirmWithMessageOptions, MessageType } from "../widgets/dialogs/confirm.js";
import { InfoExtraProps } from "../widgets/dialogs/info.jsx";
import type { PromptDialogOptions } from "../widgets/dialogs/prompt.js";
import { focusSavedElement, saveFocusedElement } from "./focus.js";
import keyboardActionsService from "./keyboard_actions.js";
import { InfoExtraProps } from "../widgets/dialogs/info.jsx";
export async function openDialog($dialog: JQuery<HTMLElement>, closeActDialog = true, config?: Partial<Modal.Options>) {
if (closeActDialog) {
@@ -27,6 +25,7 @@ export async function openDialog($dialog: JQuery<HTMLElement>, closeActDialog =
}
});
const keyboardActionsService = (await import("./keyboard_actions.js")).default;
keyboardActionsService.updateDisplayedShortcuts($dialog);
return $dialog;

View File

@@ -1,30 +0,0 @@
import { describe, expect, it } from "vitest";
import { isValidDocName } from "./doc_renderer.js";
describe("isValidDocName", () => {
it("accepts valid docNames", () => {
expect(isValidDocName("launchbar_intro")).toBe(true);
expect(isValidDocName("User Guide/Quick Start")).toBe(true);
expect(isValidDocName("User Guide/User Guide/Quick Start")).toBe(true);
expect(isValidDocName("Quick Start Guide")).toBe(true);
expect(isValidDocName("quick_start_guide")).toBe(true);
expect(isValidDocName("quick-start-guide")).toBe(true);
});
it("rejects path traversal attacks", () => {
expect(isValidDocName("..")).toBe(false);
expect(isValidDocName("../etc/passwd")).toBe(false);
expect(isValidDocName("foo/../bar")).toBe(false);
expect(isValidDocName("../../../../api/notes/_malicious/open")).toBe(false);
expect(isValidDocName("..\\etc\\passwd")).toBe(false);
expect(isValidDocName("foo\\bar")).toBe(false);
});
it("rejects URL manipulation attacks", () => {
expect(isValidDocName("../../../../api/notes/_malicious/open?x=")).toBe(false);
expect(isValidDocName("foo#bar")).toBe(false);
expect(isValidDocName("%2e%2e")).toBe(false);
expect(isValidDocName("%2e%2e%2f%2e%2e%2fapi")).toBe(false);
});
});

View File

@@ -3,39 +3,22 @@ import { applyReferenceLinks } from "../widgets/type_widgets/text/read_only_help
import { getCurrentLanguage } from "./i18n.js";
import { formatCodeBlocks } from "./syntax_highlight.js";
/**
* Validates a docName to prevent path traversal attacks.
* Allows forward slashes for subdirectories (e.g., "User Guide/Quick Start")
* but blocks traversal sequences and URL manipulation characters.
*/
export function isValidDocName(docName: string): boolean {
// Allow alphanumeric characters, spaces, underscores, hyphens, and forward slashes.
const validDocNameRegex = /^[a-zA-Z0-9_/\- ]+$/;
return validDocNameRegex.test(docName);
}
export default function renderDoc(note: FNote) {
return new Promise<JQuery<HTMLElement>>((resolve) => {
const docName = note.getLabelValue("docName");
const $content = $("<div>");
// find doc based on language
const url = getUrl(docName, getCurrentLanguage());
if (url) {
if (docName) {
// find doc based on language
const url = getUrl(docName, getCurrentLanguage());
$content.load(url, async (response, status) => {
// fallback to english doc if no translation available
if (status === "error") {
const fallbackUrl = getUrl(docName, "en");
if (fallbackUrl) {
$content.load(fallbackUrl, async () => {
await processContent(fallbackUrl, $content);
resolve($content);
});
} else {
$content.load(fallbackUrl, async () => {
await processContent(fallbackUrl, $content);
resolve($content);
}
});
return;
}
@@ -45,6 +28,8 @@ export default function renderDoc(note: FNote) {
} else {
resolve($content);
}
return $content;
});
}
@@ -63,14 +48,7 @@ async function processContent(url: string, $content: JQuery<HTMLElement>) {
await applyReferenceLinks($content[0]);
}
function getUrl(docNameValue: string | null, language: string) {
if (!docNameValue) return;
if (!isValidDocName(docNameValue)) {
console.error(`Invalid docName: ${docNameValue}`);
return null;
}
function getUrl(docNameValue: string, language: string) {
// Cannot have spaces in the URL due to how JQuery.load works.
docNameValue = docNameValue.replaceAll(" ", "%20");
// The user guide is available only in English, so make sure we are requesting correctly since 404s in standalone client are treated differently.
@@ -83,7 +61,7 @@ function getBasePath() {
return `server-assets`;
}
if (window.glob.isDev) {
return `${window.glob.assetPath}/..`;
return `${window.glob.assetPath }/..`;
}
return window.glob.assetPath;
}

View File

@@ -1,6 +1,6 @@
import { t } from "./i18n";
import options from "./options";
import { isMobile, isStandalone } from "./utils";
import { isMobile } from "./utils";
export interface ExperimentalFeature {
id: string;
@@ -13,21 +13,11 @@ export const experimentalFeatures = [
id: "new-layout",
name: t("experimental_features.new_layout_name"),
description: t("experimental_features.new_layout_description"),
},
{
id: "llm",
name: t("experimental_features.llm_name"),
description: t("experimental_features.llm_description"),
}
] as const satisfies ExperimentalFeature[];
export type ExperimentalFeatureId = typeof experimentalFeatures[number]["id"];
/** Returns experimental features available for the current platform (excludes LLM in standalone mode). */
export function getAvailableExperimentalFeatures() {
return experimentalFeatures.filter(f => !(f.id === "llm" && isStandalone));
}
let enabledFeatures: Set<ExperimentalFeatureId> | null = null;
export function isExperimentalFeatureEnabled(featureId: ExperimentalFeatureId): boolean {
@@ -35,24 +25,14 @@ export function isExperimentalFeatureEnabled(featureId: ExperimentalFeatureId):
return (isMobile() || options.is("newLayout"));
}
// LLM features require server-side API calls that don't work in standalone mode
// due to CORS restrictions from LLM providers (OpenAI, Google don't allow browser requests)
if (featureId === "llm" && isStandalone) {
return false;
}
return getEnabledFeatures().has(featureId);
}
export function getEnabledExperimentalFeatureIds() {
let values = [ ...getEnabledFeatures().values() ];
const values = [ ...getEnabledFeatures().values() ];
if (isMobile() || options.is("newLayout")) {
values.push("new-layout");
}
// LLM is not available in standalone mode
if (isStandalone) {
values = values.filter(v => v !== "llm");
}
return values;
}

View File

@@ -1,16 +1,14 @@
import type { OptionNames } from "@triliumnext/commons";
import appContext from "../components/app_context.js";
import FAttachment, { type FAttachmentRow } from "../entities/fattachment.js";
import FAttribute, { type FAttributeRow } from "../entities/fattribute.js";
import LoadResults from "./load_results.js";
import froca from "./froca.js";
import utils from "./utils.js";
import options from "./options.js";
import noteAttributeCache from "./note_attribute_cache.js";
import FBranch, { type FBranchRow } from "../entities/fbranch.js";
import FAttribute, { type FAttributeRow } from "../entities/fattribute.js";
import FAttachment, { type FAttachmentRow } from "../entities/fattachment.js";
import type { default as FNote, FNoteRow } from "../entities/fnote.js";
import type { EntityChange } from "../server_types.js";
import froca from "./froca.js";
import LoadResults from "./load_results.js";
import noteAttributeCache from "./note_attribute_cache.js";
import options from "./options.js";
import utils from "./utils.js";
import type { OptionNames } from "@triliumnext/commons";
async function processEntityChanges(entityChanges: EntityChange[]) {
const loadResults = new LoadResults(entityChanges);
@@ -65,7 +63,7 @@ async function processEntityChanges(entityChanges: EntityChange[]) {
if (entityName === "branches" && !((entity as FBranchRow).parentNoteId in froca.notes)) {
missingNoteIds.push((entity as FBranchRow).parentNoteId);
} else if (entityName === "attributes") {
const attributeEntity = entity as FAttributeRow;
let attributeEntity = entity as FAttributeRow;
if (attributeEntity.type === "relation" && (attributeEntity.name === "template" || attributeEntity.name === "inherit") && !(attributeEntity.value in froca.notes)) {
missingNoteIds.push(attributeEntity.value);
}
@@ -81,6 +79,7 @@ async function processEntityChanges(entityChanges: EntityChange[]) {
noteAttributeCache.invalidate();
}
const appContext = (await import("../components/app_context.js")).default;
await appContext.triggerEvent("entitiesReloaded", { loadResults });
}
}

View File

@@ -1,4 +1,4 @@
import { createContext, Fragment, h, VNode } from "preact";
import { Fragment, h, VNode } from "preact";
import * as hooks from "preact/hooks";
import ActionButton from "../widgets/react/ActionButton";
@@ -47,7 +47,6 @@ export const preactAPI = Object.freeze({
// Core
h,
Fragment,
createContext,
/**
* Method that must be run for widget scripts that run on Preact, using JSX. The method just returns the same definition, reserved for future typechecking and perhaps validation purposes.

View File

@@ -1,10 +1,10 @@
import { findDuplicateJsonKeys, LOCALES } from "@triliumnext/commons";
import { LOCALES } from "@triliumnext/commons";
import { readFileSync } from "fs";
import { join } from "path";
import { describe, expect, it } from "vitest";
describe("i18n", () => {
it("translations are valid JSON with no duplicate keys", () => {
it("translations are valid JSON", () => {
for (const locale of LOCALES) {
if (locale.contentOnly || locale.id === "en_rtl") {
continue;
@@ -14,13 +14,6 @@ describe("i18n", () => {
const translationFile = readFileSync(translationPath, { encoding: "utf-8" });
expect(() => JSON.parse(translationFile), `JSON error while parsing locale '${locale.id}' at "${translationPath}"`)
.not.toThrow();
const duplicates = findDuplicateJsonKeys(translationFile);
expect(
duplicates,
`Duplicate keys in locale '${locale.id}' at "${translationPath}":\n${
duplicates.map((d) => ` - "${d.key}" (line ${d.line})`).join("\n")}`
).toEqual([]);
}
});
});

View File

@@ -1,14 +1,22 @@
import { LOCALE_IDS, LOCALES, setDayjsLocale } from "@triliumnext/commons";
import { type Locale, LOCALE_IDS, setDayjsLocale } from "@triliumnext/commons";
import i18next from "i18next";
import i18nextHttpBackend from "i18next-http-backend";
import { initReactI18next } from "react-i18next";
import options from "./options.js";
import server from "./server.js";
let locales: Locale[] | null;
/**
* A deferred promise that resolves when translations are initialized.
*/
export const translationsInitializedPromise = $.Deferred();
export async function initLocale(locale: LOCALE_IDS = "en") {
export async function initLocale() {
const locale = ((options.get("locale") as string) || "en") as LOCALE_IDS;
locales = await server.get<Locale[]>("options/locales");
i18next.use(initReactI18next);
await i18next.use(i18nextHttpBackend).init({
@@ -17,7 +25,8 @@ export async function initLocale(locale: LOCALE_IDS = "en") {
backend: {
loadPath: `${window.glob.assetPath}/translations/{{lng}}/{{ns}}.json`
},
returnEmptyString: false
returnEmptyString: false,
showSupportNotice: false
});
await setDayjsLocale(locale);
@@ -25,7 +34,11 @@ export async function initLocale(locale: LOCALE_IDS = "en") {
}
export function getAvailableLocales() {
return LOCALES;
if (!locales) {
throw new Error("Tried to load list of locales, but localization is not yet initialized.");
}
return locales;
}
/**
@@ -36,7 +49,7 @@ export function getAvailableLocales() {
*/
export function getLocaleById(localeId: string | null | undefined) {
if (!localeId) return null;
return LOCALES.find((l) => l.id === localeId) ?? null;
return locales?.find((l) => l.id === localeId) ?? null;
}
export const t = i18next.t;

View File

@@ -19,8 +19,7 @@ export const byNoteType: Record<Exclude<NoteType, "book">, string | null> = {
search: null,
text: null,
webView: null,
spreadsheet: null,
llmChat: null
spreadsheet: null
};
export const byBookType: Record<ViewTypeOptions, string | null> = {

View File

@@ -4,8 +4,6 @@ import appContext, { type NoteCommandData } from "../components/app_context.js";
import { openInCurrentNoteContext } from "../components/note_context.js";
import linkContextMenuService from "../menus/link_context_menu.js";
import froca from "./froca.js";
import { t } from "./i18n.js";
import { showError } from "./toast.js";
import treeService from "./tree.js";
import utils from "./utils.js";
@@ -30,7 +28,7 @@ async function getLinkIcon(noteId: string, viewMode: ViewMode | undefined) {
return icon;
}
export type ViewMode = "default" | "source" | "attachments" | "contextual-help" | "note-map" | "ocr";
export type ViewMode = "default" | "source" | "attachments" | "contextual-help" | "note-map";
export interface ViewScope {
/**
@@ -60,8 +58,6 @@ export interface ViewScope {
*/
tocPreviousVisible?: boolean;
tocCollapsedHeadings?: Set<string>;
/** When set, scrolls to a bookmark anchor within the note after navigation. */
bookmark?: string;
}
interface CreateLinkOptions {
@@ -246,7 +242,7 @@ export function parseNavigationStateFromUrl(url: string | undefined) {
hoistedNoteId = value;
} else if (name === "searchString") {
searchString = value; // supports triggering search from URL, e.g. #?searchString=blabla
} else if (["viewMode", "attachmentId", "bookmark"].includes(name)) {
} else if (["viewMode", "attachmentId"].includes(name)) {
(viewScope as any)[name] = value;
} else if (name === "popup") {
openInPopup = true;
@@ -337,30 +333,15 @@ export function goToLinkExt(evt: MouseEvent | JQuery.ClickEvent | JQuery.MouseDo
if (openInNewTab || openInNewWindow || (isLeftClick && (withinEditLink || outsideOfCKEditor))) {
if (hrefLink.toLowerCase().startsWith("http") || hrefLink.startsWith("api/")) {
window.open(hrefLink, "_blank");
} else if (ALLOWED_PROTOCOLS.some((protocol) => hrefLink.toLowerCase().startsWith(`${protocol}:`))) {
} else {
// Enable protocols supported by CKEditor 5 to be clickable.
if (utils.isElectron()) {
const electron = utils.dynamicRequire("electron");
const reportLinkError = (e: unknown) => {
const message = e instanceof Error ? e.message : String(e);
logError(`Failed to open link '${hrefLink}': ${message}`);
showError(t("link.failed_to_open", { href: hrefLink, message }));
};
if (hrefLink.toLowerCase().startsWith("file:")) {
// shell.openExternal mishandles Unicode file:// URLs on Windows;
// convert to a filesystem path and use shell.openPath instead.
// Normalize file://c:/... (2 slashes — drive read as host) to file:///c:/...
const normalized = hrefLink.replace(/^file:\/\/(?=[a-zA-Z]:)/i, "file:///");
const { fileURLToPath } = utils.dynamicRequire("url");
electron.shell.openPath(fileURLToPath(normalized)).then((err: string) => {
if (err) reportLinkError(new Error(err));
}).catch(reportLinkError);
if (ALLOWED_PROTOCOLS.some((protocol) => hrefLink.toLowerCase().startsWith(`${protocol}:`))) {
if ( utils.isElectron()) {
const electron = utils.dynamicRequire("electron");
electron.shell.openExternal(hrefLink);
} else {
electron.shell.openExternal(hrefLink).catch(reportLinkError);
window.open(hrefLink, "_blank");
}
} else {
window.open(hrefLink, "_blank");
}
}
}
@@ -434,13 +415,6 @@ async function loadReferenceLinkTitle($el: JQuery<HTMLElement>, href: string | n
const title = await getReferenceLinkTitle(href);
$el.text(title);
if (viewScope?.bookmark) {
$el.append($("<small>").append(
$("<span>").addClass("bx bx-bookmark"),
document.createTextNode(viewScope.bookmark)
));
}
if (note) {
const icon = await getLinkIcon(noteId, viewScope.viewMode);
@@ -466,8 +440,8 @@ async function getReferenceLinkTitle(href: string) {
return attachment ? attachment.title : "[missing attachment]";
}
return note.title;
}
function getReferenceLinkTitleSync(href: string) {
@@ -490,12 +464,8 @@ function getReferenceLinkTitleSync(href: string) {
return attachment ? attachment.title : "[missing attachment]";
}
if (viewScope?.bookmark) {
return `${note.title} - ${viewScope.bookmark}`;
}
return note.title;
}
if (glob.device !== "print") {

View File

@@ -1,116 +0,0 @@
import type { LlmChatConfig, LlmCitation, LlmMessage, LlmModelInfo,LlmUsage } from "@triliumnext/commons";
import server from "./server.js";
/**
* Fetch available models from all configured providers.
*/
export async function getAvailableModels(): Promise<LlmModelInfo[]> {
const response = await server.get<{ models?: LlmModelInfo[] }>("llm-chat/models");
return response.models ?? [];
}
export interface StreamCallbacks {
onChunk: (text: string) => void;
onThinking?: (text: string) => void;
onToolUse?: (toolName: string, input: Record<string, unknown>) => void;
onToolResult?: (toolName: string, result: string, isError?: boolean) => void;
onCitation?: (citation: LlmCitation) => void;
onUsage?: (usage: LlmUsage) => void;
onError: (error: string) => void;
onDone: () => void;
}
/**
* Stream a chat completion from the LLM API using Server-Sent Events.
*/
export async function streamChatCompletion(
messages: LlmMessage[],
config: LlmChatConfig,
callbacks: StreamCallbacks,
abortSignal?: AbortSignal
): Promise<void> {
const headers = await server.getHeaders();
const response = await fetch(`${window.glob.baseApiUrl}llm-chat/stream`, {
method: "POST",
headers: {
...headers,
"Content-Type": "application/json"
} as HeadersInit,
body: JSON.stringify({ messages, config }),
signal: abortSignal
});
if (!response.ok) {
callbacks.onError(`HTTP ${response.status}: ${response.statusText}`);
return;
}
const reader = response.body?.getReader();
if (!reader) {
callbacks.onError("No response body");
return;
}
const decoder = new TextDecoder();
let buffer = "";
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
buffer = lines.pop() || "";
for (const line of lines) {
if (line.startsWith("data: ")) {
try {
const data = JSON.parse(line.slice(6));
switch (data.type) {
case "text":
callbacks.onChunk(data.content);
break;
case "thinking":
callbacks.onThinking?.(data.content);
break;
case "tool_use":
callbacks.onToolUse?.(data.toolName, data.toolInput);
// Yield to force Preact to commit the pending tool call
// state before we process the result.
await new Promise((r) => setTimeout(r, 1));
break;
case "tool_result":
callbacks.onToolResult?.(data.toolName, data.result, data.isError);
await new Promise((r) => setTimeout(r, 1));
break;
case "citation":
if (data.citation) {
callbacks.onCitation?.(data.citation);
}
break;
case "usage":
if (data.usage) {
callbacks.onUsage?.(data.usage);
}
break;
case "error":
callbacks.onError(data.error);
break;
case "done":
callbacks.onDone();
break;
}
} catch (e) {
console.error("Failed to parse SSE data line:", line, e);
}
}
}
}
} finally {
reader.releaseLock();
}
}

View File

@@ -32,9 +32,4 @@ describe("Mermaid", () => {
`;
expect(postprocessMermaidSvg(before)).toBe(after);
});
it("replaces &nbsp; with numeric entity for valid XML", () => {
expect(postprocessMermaidSvg("<text>a&nbsp;b&nbsp;&nbsp;c</text>"))
.toBe("<text>a&#160;b&#160;&#160;c</text>");
});
});

View File

@@ -49,15 +49,11 @@ export async function loadElkIfNeeded(mermaid: Mermaid, mermaidContent: string)
* Processes the output of a Mermaid SVG render before it should be delivered to the user.
*
* <p>
* Currently this fixes <br> to <br/> and replaces named HTML entities like &nbsp; with their
* numeric equivalents, both of which would otherwise cause invalid XML when the SVG is saved
* as an attachment.
* Currently this fixes <br> to <br/> which would otherwise cause an invalid XML.
*
* @param svg the Mermaid SVG to process.
* @returns the processed SVG.
*/
export function postprocessMermaidSvg(svg: string) {
return svg
.replaceAll(/<br\s*>/ig, "<br/>")
.replaceAll(/&nbsp;/g, "&#160;");
return svg.replaceAll(/<br\s*>/ig, "<br/>");
}

View File

@@ -68,8 +68,7 @@ async function autocompleteSourceForCKEditor(queryText: string) {
name: row.notePathTitle || "",
link: `#${row.notePath}`,
notePath: row.notePath,
highlightedNotePathTitle: row.highlightedNotePathTitle,
icon: row.icon
highlightedNotePathTitle: row.highlightedNotePathTitle
};
})
);

View File

@@ -1,7 +1,6 @@
import type { NoteType } from "../entities/fnote.js";
import type { MenuCommandItem, MenuItem, MenuItemBadge, MenuSeparatorItem } from "../menus/context_menu.js";
import type { TreeCommandNames } from "../menus/tree_context_menu.js";
import { isExperimentalFeatureEnabled } from "./experimental_features.js";
import froca from "./froca.js";
import { t } from "./i18n.js";
import server from "./server.js";
@@ -27,7 +26,7 @@ export const NOTE_TYPES: NoteTypeMapping[] = [
// The default note type (always the first item)
{ type: "text", mime: "text/html", title: t("note_types.text"), icon: "bx-note" },
{ type: "spreadsheet", mime: "application/json", title: t("note_types.spreadsheet"), icon: "bx-table", isBeta: true, isNew: true },
{ type: "spreadsheet", mime: "application/json", title: t("note_types.spreadsheet"), icon: "bx-table", isBeta: true },
// Text notes group
{ type: "book", mime: "", title: t("note_types.book"), icon: "bx-book" },
@@ -42,14 +41,12 @@ export const NOTE_TYPES: NoteTypeMapping[] = [
{ type: "relationMap", mime: "application/json", title: t("note_types.relation-map"), icon: "bxs-network-chart" },
// Misc note types
{ type: "llmChat", mime: "application/json", title: t("note_types.llm-chat"), icon: "bx-message-square-dots", isBeta: true },
{ type: "render", mime: "", title: t("note_types.render-note"), icon: "bx-extension" },
{ type: "search", title: t("note_types.saved-search"), icon: "bx-file-find", static: true },
{ type: "webView", mime: "", title: t("note_types.web-view"), icon: "bx-globe-alt" },
// Code notes
{ type: "code", mime: "text/plain", title: t("note_types.code"), icon: "bx-code" },
{ type: "code", mime: "text/x-markdown", title: t("note_types.markdown"), icon: "bxl-markdown", isNew: true },
// Reserved types (cannot be created by the user)
{ type: "contentWidget", mime: "", title: t("note_types.widget"), reserved: true },
@@ -95,13 +92,11 @@ async function getNoteTypeItems(command?: TreeCommandNames) {
function getBlankNoteTypes(command?: TreeCommandNames): MenuItem<TreeCommandNames>[] {
return NOTE_TYPES
.filter((nt) => !nt.reserved && nt.type !== "book")
.filter((nt) => nt.type !== "llmChat" || isExperimentalFeatureEnabled("llm"))
.map((nt) => {
const menuItem: MenuCommandItem<TreeCommandNames> = {
title: nt.title,
command,
type: nt.type,
mime: nt.mime,
uiIcon: `bx ${nt.icon}`,
badges: []
};

View File

@@ -18,10 +18,6 @@ async function render(note: FNote, $el: JQuery<HTMLElement>, onError?: ErrorHand
for (const renderNoteId of renderNoteIds) {
const bundle = await server.postWithSilentInternalServerError<Bundle>(`script/bundle/${renderNoteId}`);
if (!bundle) {
throw new Error(`Script note '${renderNoteId}' could not be loaded. It may be protected and require an active protected session.`);
}
const $scriptContainer = $("<div>");
$el.append($scriptContainer);

View File

@@ -1,4 +1,3 @@
import { t } from "./i18n.js";
import utils, { isShare } from "./utils.js";
import ValidationError from "./validation_error.js";
@@ -33,7 +32,8 @@ async function getHeaders(headers?: Headers) {
return {};
}
const activeNoteContext = glob.appContext?.tabManager ? glob.appContext.tabManager.getActiveContext() : null;
const appContext = (await import("../components/app_context.js")).default;
const activeNoteContext = appContext.tabManager ? appContext.tabManager.getActiveContext() : null;
// headers need to be lowercase because node.js automatically converts them to lower case
// also avoiding using underscores instead of dashes since nginx filters them out by default
@@ -270,11 +270,7 @@ function ajax(url: string, method: string, data: unknown, headers: Headers, opts
} else if (opts.silentInternalServerError && jqXhr.status === 500) {
// report nothing
} else {
try {
await reportError(method, url, jqXhr.status, jqXhr.responseText);
} catch {
// reportError may throw (e.g. ValidationError); ensure rej() is still called below.
}
await reportError(method, url, jqXhr.status, jqXhr.responseText);
}
rej(jqXhr.responseText);
@@ -344,7 +340,6 @@ async function reportError(method: string, url: string, statusCode: number, resp
} catch (e) {}
}
// Dynamic import to avoid circular dependency (toast → app_context → options → server).
const toastService = (await import("./toast.js")).default;
const messageStr = (typeof message === "string" ? message : JSON.stringify(message)) || "-";
@@ -358,6 +353,7 @@ async function reportError(method: string, url: string, statusCode: number, resp
...response
});
} else {
const { t } = await import("./i18n.js");
if (statusCode === 400 && (url.includes("%23") || url.includes("%2F"))) {
toastService.showPersistent({
id: "trafik-blocked",
@@ -371,7 +367,8 @@ async function reportError(method: string, url: string, statusCode: number, resp
t("server.unknown_http_error_content", { statusCode, method, url, message: messageStr }),
15_000);
}
window.logError(`${statusCode} ${method} ${url} - ${message}`);
const { logError } = await import("./ws.js");
logError(`${statusCode} ${method} ${url} - ${message}`);
}
}

View File

@@ -1,87 +0,0 @@
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import SpacedUpdate from "./spaced_update";
// Mock logError which is a global in Trilium
vi.stubGlobal("logError", vi.fn());
describe("SpacedUpdate", () => {
beforeEach(() => {
vi.useFakeTimers();
});
afterEach(() => {
vi.useRealTimers();
});
it("should only call updater once per interval even with multiple pending callbacks", async () => {
const updater = vi.fn(async () => {
// Simulate a slow network request - this is where the race condition occurs
await new Promise((resolve) => setTimeout(resolve, 100));
});
const spacedUpdate = new SpacedUpdate(updater, 50);
// Simulate rapid typing - each keystroke calls scheduleUpdate()
// This queues multiple setTimeout callbacks due to recursive scheduleUpdate() calls
for (let i = 0; i < 10; i++) {
spacedUpdate.scheduleUpdate();
// Small delay between keystrokes
await vi.advanceTimersByTimeAsync(5);
}
// Advance time past the update interval to trigger the update
await vi.advanceTimersByTimeAsync(100);
// Let the "network request" complete and any pending callbacks run
await vi.advanceTimersByTimeAsync(200);
// The updater should have been called only ONCE, not multiple times
// With the bug, multiple pending setTimeout callbacks would all pass the time check
// during the async updater call and trigger multiple concurrent requests
expect(updater).toHaveBeenCalledTimes(1);
});
it("should call updater again if changes occur during the update", async () => {
const updater = vi.fn(async () => {
await new Promise((resolve) => setTimeout(resolve, 50));
});
const spacedUpdate = new SpacedUpdate(updater, 30);
// First update
spacedUpdate.scheduleUpdate();
await vi.advanceTimersByTimeAsync(40);
// Schedule another update while the first one is in progress
spacedUpdate.scheduleUpdate();
// Let first update complete
await vi.advanceTimersByTimeAsync(60);
// Advance past the interval again for the second update
await vi.advanceTimersByTimeAsync(100);
// Should have been called twice - once for each distinct change period
expect(updater).toHaveBeenCalledTimes(2);
});
it("should restore changed flag on error so retry can happen", async () => {
const updater = vi.fn()
.mockRejectedValueOnce(new Error("Network error"))
.mockResolvedValue(undefined);
const spacedUpdate = new SpacedUpdate(updater, 50);
spacedUpdate.scheduleUpdate();
// Advance to trigger first update (which will fail)
await vi.advanceTimersByTimeAsync(60);
// The error should have restored the changed flag, so scheduling again should work
spacedUpdate.scheduleUpdate();
await vi.advanceTimersByTimeAsync(60);
expect(updater).toHaveBeenCalledTimes(2);
});
});

View File

@@ -77,22 +77,16 @@ export default class SpacedUpdate {
}
if (Date.now() - this.lastUpdated > this.updateInterval) {
// Update these BEFORE the async call to prevent race conditions.
// Multiple setTimeout callbacks may be pending from recursive scheduleUpdate() calls.
// Without this, they would all pass the time check during the await and trigger multiple requests.
this.lastUpdated = Date.now();
this.changed = false;
this.onStateChanged("saving");
try {
await this.updater();
this.onStateChanged("saved");
this.changed = false;
} catch (e) {
// Restore changed flag on error so a retry can happen
this.changed = true;
this.onStateChanged("error");
logError(getErrorMessage(e));
}
this.lastUpdated = Date.now();
} else {
// update isn't triggered but changes are still pending, so we need to schedule another check
this.scheduleUpdate();

View File

@@ -33,14 +33,6 @@ export async function formatCodeBlocks($container: JQuery<HTMLElement>) {
applySingleBlockSyntaxHighlight($(codeBlock), normalizedMimeType);
}
}
// Add click-to-copy for inline code (code elements not inside pre)
if (glob.device !== "print") {
const inlineCodeElements = $container.find("code:not(pre code)");
for (const inlineCode of inlineCodeElements) {
applyInlineCodeCopy($(inlineCode));
}
}
}
export function applyCopyToClipboardButton($codeBlock: JQuery<HTMLElement>) {
@@ -59,23 +51,6 @@ export function applyCopyToClipboardButton($codeBlock: JQuery<HTMLElement>) {
$codeBlock.parent().append($copyButton);
}
export function applyInlineCodeCopy($inlineCode: JQuery<HTMLElement>) {
$inlineCode
.addClass("copyable-inline-code")
.attr("title", t("code_block.click_to_copy"))
.off("click")
.on("click", (e) => {
e.stopPropagation();
const text = $inlineCode.text();
if (!isShare) {
copyTextWithToast(text);
} else {
copyText(text);
}
});
}
/**
* Applies syntax highlight to the given code block (assumed to be <pre><code>), using highlight.js.
*/

View File

@@ -1,35 +0,0 @@
export function getThemeStyle(): "auto" | "light" | "dark" {
const configuredTheme = window.glob?.theme;
if (configuredTheme === "auto" || configuredTheme === "next") {
return "auto";
}
if (configuredTheme === "light" || configuredTheme === "dark") {
return configuredTheme;
}
if (configuredTheme === "next-light") {
return "light";
}
if (configuredTheme === "next-dark") {
return "dark";
}
const style = window.getComputedStyle(document.body);
const themeStyle = style.getPropertyValue("--theme-style");
if (style.getPropertyValue("--theme-style-auto") !== "true" && (themeStyle === "light" || themeStyle === "dark")) {
return themeStyle as "light" | "dark";
}
return "auto";
}
export function getEffectiveThemeStyle(): "light" | "dark" {
const themeStyle = getThemeStyle();
if (themeStyle === "auto") {
return window.matchMedia && window.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light";
}
return themeStyle === "dark" ? "dark" : "light";
}

Some files were not shown because too many files have changed in this diff Show More