Compare commits

..

30 Commits

Author SHA1 Message Date
Elian Doran
d0abcfe355 chore(export): bring back content CSS 2026-03-28 00:29:13 +02:00
Elian Doran
8b1d0063ff fix(standalone): unable to download ZIPs 2026-03-28 00:26:11 +02:00
Elian Doran
8cd7e48e85 fix(server): unable to export as share 2026-03-27 23:54:20 +02:00
Elian Doran
aee005b624 refactor(core): move zip provider out of import 2026-03-27 23:23:26 +02:00
Elian Doran
1d050e8784 fix(core): use of Node.js path 2026-03-27 23:18:39 +02:00
Elian Doran
0c37b2ce5c fix(export/single): crash due to use of Buffer 2026-03-27 23:13:40 +02:00
Elian Doran
73f401f106 fix(standalone/export): redirects to URL without downloading 2026-03-27 23:13:30 +02:00
Elian Doran
d2a0c540ba fix(core): get rid of Node dependencies from ZIP export 2026-03-27 23:10:39 +02:00
Elian Doran
4458d5b8f7 chore(core): fix more errors related to export 2026-03-27 22:27:18 +02:00
Elian Doran
a59d6dfb11 chore(core): fix most errors with export 2026-03-27 22:20:59 +02:00
Elian Doran
21e2cf10c2 chore(core): relocate export route 2026-03-27 21:57:43 +02:00
Elian Doran
c94ca00daa chore(core): relocate export service 2026-03-27 21:54:51 +02:00
Elian Doran
6c75df70e0 chore: solve type errors 2026-03-27 21:40:19 +02:00
Elian Doran
0211535f73 fix(edit-docs): missing zip primitives 2026-03-27 19:26:27 +02:00
Elian Doran
2d4027c214 fix(server): depending on unexported zip import service 2026-03-27 19:18:31 +02:00
Elian Doran
5b3fb315d7 fix(core): on new database, opening hidden notes instead of the root 2026-03-27 19:16:23 +02:00
Elian Doran
24650edd62 fix(setup): demo DB not respected 2026-03-27 19:03:39 +02:00
Elian Doran
d29d1428ed feat(standalone/import): import demo DB 2026-03-27 18:55:18 +02:00
Elian Doran
91d526b15f feat(standalone/import): improve importing speed 2026-03-27 18:27:19 +02:00
Elian Doran
22c86cf3b5 feat(standalone): basic ZIP support 2026-03-27 18:11:59 +02:00
Elian Doran
a0573c439b fix(core): extension lookup failing in standalone 2026-03-27 17:54:37 +02:00
Elian Doran
050cdd0a85 chore(core): add a few missing constants 2026-03-27 17:00:13 +02:00
Elian Doran
55f09fe21a chore(core): fix usage of Buffer 2026-03-27 16:45:44 +02:00
Elian Doran
f069b41df6 chore(standalone): upload middleware with error handling 2026-03-27 16:40:23 +02:00
Elian Doran
f81369d643 feat(core): support md5 hash 2026-03-27 14:33:52 +02:00
Elian Doran
f1d7d34f1a chore(core): align tsconfig 2026-03-27 14:28:48 +02:00
Elian Doran
ce1f7a4274 chore(scripts): deduplicate errors listing 2026-03-27 14:28:40 +02:00
Elian Doran
6ce1d31ceb chore(import): integrate import route into core 2026-03-27 11:45:02 +02:00
Elian Doran
ecb467f2b7 chore(import): fix a few type errors 2026-03-27 11:40:48 +02:00
Elian Doran
4ffaadd481 chore(import): move all services to core (with errors) 2026-03-27 11:40:06 +02:00
94 changed files with 831 additions and 946 deletions

View File

@@ -40,12 +40,14 @@
"color": "5.0.3",
"debounce": "3.0.0",
"draggabilly": "3.0.0",
"fflate": "0.8.2",
"force-graph": "1.51.2",
"globals": "17.4.0",
"i18next": "25.10.10",
"i18next-http-backend": "3.0.2",
"jquery": "4.0.0",
"jquery.fancytree": "2.38.5",
"js-md5": "0.8.3",
"js-sha1": "0.7.0",
"js-sha256": "0.11.1",
"js-sha512": "0.9.0",

View File

@@ -5,6 +5,12 @@
import { getContext, routes } from "@triliumnext/core";
export interface UploadedFile {
originalname: string;
mimetype: string;
buffer: Uint8Array;
}
export interface BrowserRequest {
method: string;
url: string;
@@ -13,6 +19,7 @@ export interface BrowserRequest {
query: Record<string, string | undefined>;
headers?: Record<string, string>;
body?: unknown;
file?: UploadedFile;
}
export interface BrowserResponse {
@@ -154,8 +161,9 @@ export class BrowserRouter {
const query = parseQuery(url.search);
const upperMethod = method.toUpperCase();
// Parse JSON body if it's an ArrayBuffer and content-type suggests JSON
// Parse body based on content-type
let parsedBody = body;
let uploadedFile: UploadedFile | undefined;
if (body instanceof ArrayBuffer && headers) {
const contentType = headers['content-type'] || headers['Content-Type'] || '';
if (contentType.includes('application/json')) {
@@ -166,9 +174,31 @@ export class BrowserRouter {
}
} catch (e) {
console.warn('[Router] Failed to parse JSON body:', e);
// Keep original body if JSON parsing fails
parsedBody = body;
}
} else if (contentType.includes('multipart/form-data')) {
try {
// Reconstruct a Response so we can use the native FormData parser
const response = new Response(body, { headers: { 'content-type': contentType } });
const formData = await response.formData();
const formFields: Record<string, string> = {};
for (const [key, value] of formData.entries()) {
if (typeof value === 'string') {
formFields[key] = value;
} else {
// File field (Blob) — multer uses the field name "upload"
const fileBuffer = new Uint8Array(await value.arrayBuffer());
uploadedFile = {
originalname: value.name,
mimetype: value.type || 'application/octet-stream',
buffer: fileBuffer
};
}
}
parsedBody = formFields;
} catch (e) {
console.warn('[Router] Failed to parse multipart body:', e);
}
}
}
// Find matching route
@@ -191,7 +221,8 @@ export class BrowserRouter {
params,
query,
headers: headers ?? {},
body: parsedBody
body: parsedBody,
file: uploadedFile
};
try {

View File

@@ -35,6 +35,7 @@ function toExpressLikeReq(req: BrowserRequest) {
body: req.body,
headers: req.headers ?? {},
method: req.method,
file: req.file,
get originalUrl() { return req.url; }
};
}
@@ -121,6 +122,45 @@ function createRoute(router: BrowserRouter) {
};
}
/**
 * Async variant of createRoute for handlers that return Promises (e.g. import).
 * Uses transactionalAsync (manual BEGIN/COMMIT/ROLLBACK) instead of the synchronous
 * transactional() wrapper, which would commit an empty transaction immediately when
 * passed an async callback.
 *
 * @param router the BrowserRouter on which the produced route-registration
 *               function registers its handlers.
 * @returns a registration function with the same shape as createRoute's:
 *          (method, path, middleware, handler, resultHandler?). Middleware is
 *          intentionally ignored (`_middleware`) — auth/CSRF middleware are
 *          no-ops in the browser build.
 */
function createAsyncRoute(router: BrowserRouter) {
    return (method: HttpMethod, path: string, _middleware: any[], handler: (req: any, res: any) => Promise<unknown>, resultHandler?: ((req: any, res: any, result: unknown) => unknown) | null) => {
        router.register(method, path, (req: BrowserRequest) => {
            // Each request runs in its own execution context so CLS-style
            // state (set from the request headers below) stays isolated.
            return getContext().init(async () => {
                setContextFromHeaders(req);
                const expressLikeReq = toExpressLikeReq(req);
                const mockRes = createMockExpressResponse();
                // The whole handler runs inside one async transaction; it is
                // rolled back if the handler rejects.
                const result = await getSql().transactionalAsync(() => handler(expressLikeReq, mockRes));
                // If the handler used the mock response (e.g. image routes that call res.send()),
                // return it as a raw response so BrowserRouter doesn't JSON-serialize it.
                if (mockRes._used) {
                    return {
                        [RAW_RESPONSE]: true as const,
                        status: mockRes._status,
                        headers: mockRes._headers,
                        body: mockRes._body
                    };
                }
                if (resultHandler) {
                    // Create a minimal response object that captures what apiResultHandler sets.
                    const res = createResultHandlerResponse();
                    resultHandler(expressLikeReq, res, result);
                    return res.result;
                }
                return result;
            });
        });
    };
}
/**
* Creates a mock Express response object that captures calls to set(), send(), sendStatus(), etc.
* Used for route handlers (like image routes) that write directly to the response.
@@ -219,7 +259,7 @@ export function registerRoutes(router: BrowserRouter): void {
const apiRoute = createApiRoute(router, true);
routes.buildSharedApiRoutes({
route: createRoute(router),
asyncRoute: createRoute(router),
asyncRoute: createAsyncRoute(router),
apiRoute,
asyncApiRoute: createApiRoute(router, false),
apiResultHandler,
@@ -227,7 +267,9 @@ export function registerRoutes(router: BrowserRouter): void {
checkApiAuthOrElectron: noopMiddleware,
checkAppNotInitialized,
checkCredentials: noopMiddleware,
loginRateLimiter: noopMiddleware
loginRateLimiter: noopMiddleware,
uploadMiddlewareWithErrorHandling: noopMiddleware,
csrfMiddleware: noopMiddleware
});
apiRoute('get', '/bootstrap', bootstrapRoute);

View File

@@ -2,6 +2,7 @@ import type { CryptoProvider } from "@triliumnext/core";
import { sha1 } from "js-sha1";
import { sha256 } from "js-sha256";
import { sha512 } from "js-sha512";
import { md5 } from "js-md5";
interface Cipher {
update(data: Uint8Array): Uint8Array;
@@ -15,11 +16,18 @@ const CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
*/
export default class BrowserCryptoProvider implements CryptoProvider {
createHash(algorithm: "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
const data = typeof content === "string" ? content :
new TextDecoder().decode(content);
const hexHash = algorithm === "sha1" ? sha1(data) : sha512(data);
let hexHash: string;
if (algorithm === "md5") {
hexHash = md5(data);
} else if (algorithm === "sha1") {
hexHash = sha1(data);
} else {
hexHash = sha512(data);
}
// Convert hex string to Uint8Array
const bytes = new Uint8Array(hexHash.length / 2);

View File

@@ -501,9 +501,12 @@ export default class BrowserSqlProvider implements DatabaseProvider {
// Helper function to execute within a transaction
const executeTransaction = (beginStatement: string, ...args: unknown[]): T => {
// If we're already in a transaction, use SAVEPOINTs for nesting
// This mimics better-sqlite3's behavior
if (self._inTransaction) {
// If we're already in a transaction (either tracked via JS flag or via actual SQLite
// autocommit state), use SAVEPOINTs for nesting — this handles the case where a manual
// BEGIN was issued directly (e.g. transactionalAsync) without going through transaction().
const sqliteInTransaction = self.db?.pointer !== undefined
&& (self.sqlite3!.capi as any).sqlite3_get_autocommit(self.db!.pointer) === 0;
if (self._inTransaction || sqliteInTransaction) {
const savepointName = `sp_${++savepointCounter}_${Date.now()}`;
self.db!.exec(`SAVEPOINT ${savepointName}`);
try {

View File

@@ -0,0 +1,18 @@
import { type ExportFormat, type ZipExportProviderData, ZipExportProvider } from "@triliumnext/core";
import contentCss from "@triliumnext/ckeditor5/src/theme/ck-content.css?raw";
/**
 * Creates the ZIP export provider for the requested format in the standalone
 * (browser) build. Provider modules are loaded lazily so export code is only
 * fetched when an export is actually triggered.
 *
 * @param format the export format requested by the user ("html" or "markdown").
 * @param data   shared provider state forwarded to the concrete provider.
 * @returns the provider instance handling the given format.
 * @throws Error when the format has no matching provider.
 */
export async function standaloneZipExportProviderFactory(format: ExportFormat, data: ZipExportProviderData): Promise<ZipExportProvider> {
    if (format === "html") {
        const htmlModule = await import("@triliumnext/core/src/services/export/zip/html.js");
        // The standalone build bundles the CKEditor content stylesheet so the
        // exported HTML is styled like the in-app editor.
        return new htmlModule.default(data, { contentCss });
    }

    if (format === "markdown") {
        const markdownModule = await import("@triliumnext/core/src/services/export/zip/markdown.js");
        return new markdownModule.default(data);
    }

    throw new Error(`Unsupported export format: '${format}'`);
}

View File

@@ -0,0 +1,79 @@
import type { FileStream, ZipArchive, ZipEntry, ZipProvider } from "@triliumnext/core/src/services/zip_provider.js";
import { strToU8, unzip, zipSync } from "fflate";
type ZipOutput = {
    send?: (body: unknown) => unknown;
    write?: (chunk: Uint8Array | string) => unknown;
    end?: (chunk?: Uint8Array | string) => unknown;
};

/**
 * In-memory ZIP builder backed by fflate. Entries are buffered as they are
 * appended; the archive is produced synchronously when finalize() runs and
 * then handed to whatever destination was pipe()d in.
 */
class BrowserZipArchive implements ZipArchive {
    readonly #files: Record<string, Uint8Array> = {};
    #output: ZipOutput | null = null;

    append(content: string | Uint8Array, options: { name: string }) {
        // Strings are encoded to UTF-8 bytes; binary payloads are stored as-is.
        const bytes = typeof content === "string" ? strToU8(content) : content;
        this.#files[options.name] = bytes;
    }

    pipe(destination: unknown) {
        this.#output = destination as ZipOutput;
    }

    async finalize(): Promise<void> {
        const target = this.#output;
        if (!target) {
            throw new Error("ZIP output destination not set.");
        }

        // Maximum compression; archives are built rarely, so trade CPU for size.
        const archive = zipSync(this.#files, { level: 9 });

        if (typeof target.send === "function") {
            // Express-like response: a single send() delivers the body.
            target.send(archive);
        } else if (typeof target.end === "function") {
            // Stream-like destination: prefer write()+end(), fall back to end(chunk).
            if (typeof target.write === "function") {
                target.write(archive);
                target.end();
            } else {
                target.end(archive);
            }
        } else {
            throw new Error("Unsupported ZIP output destination.");
        }
    }
}
/**
 * Browser implementation of ZipProvider built on fflate rather than Node.js
 * streams/archiver. File streams are unsupported because the browser has no
 * direct filesystem access.
 */
export default class BrowserZipProvider implements ZipProvider {
    createZipArchive(): ZipArchive {
        return new BrowserZipArchive();
    }

    createFileStream(_filePath: string): FileStream {
        throw new Error("File stream creation is not supported in the browser.");
    }

    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void> {
        // Promisify fflate's callback-style unzip, then visit the entries
        // sequentially so processEntry calls never overlap.
        const unzipped = new Promise<Record<string, Uint8Array>>((resolve, reject) => {
            unzip(buffer, (err, files) => (err ? reject(err) : resolve(files)));
        });

        return unzipped.then(async (files) => {
            for (const fileName of Object.keys(files)) {
                const data = files[fileName];
                // Content is already decompressed; expose it via the lazy-reader API.
                await processEntry({ fileName }, async () => data);
            }
        });
    }
}

View File

@@ -55,6 +55,7 @@ let BrowserSqlProvider: typeof import('./lightweight/sql_provider').default;
let WorkerMessagingProvider: typeof import('./lightweight/messaging_provider').default;
let BrowserExecutionContext: typeof import('./lightweight/cls_provider').default;
let BrowserCryptoProvider: typeof import('./lightweight/crypto_provider').default;
let BrowserZipProvider: typeof import('./lightweight/zip_provider').default;
let FetchRequestProvider: typeof import('./lightweight/request_provider').default;
let StandalonePlatformProvider: typeof import('./lightweight/platform_provider').default;
let translationProvider: typeof import('./lightweight/translation_provider').default;
@@ -82,6 +83,7 @@ async function loadModules(): Promise<void> {
messagingModule,
clsModule,
cryptoModule,
zipModule,
requestModule,
platformModule,
translationModule,
@@ -91,6 +93,7 @@ async function loadModules(): Promise<void> {
import('./lightweight/messaging_provider.js'),
import('./lightweight/cls_provider.js'),
import('./lightweight/crypto_provider.js'),
import('./lightweight/zip_provider.js'),
import('./lightweight/request_provider.js'),
import('./lightweight/platform_provider.js'),
import('./lightweight/translation_provider.js'),
@@ -101,6 +104,7 @@ async function loadModules(): Promise<void> {
WorkerMessagingProvider = messagingModule.default;
BrowserExecutionContext = clsModule.default;
BrowserCryptoProvider = cryptoModule.default;
BrowserZipProvider = zipModule.default;
FetchRequestProvider = requestModule.default;
StandalonePlatformProvider = platformModule.default;
translationProvider = translationModule.default;
@@ -152,11 +156,18 @@ async function initialize(): Promise<void> {
await coreModule.initializeCore({
executionContext: new BrowserExecutionContext(),
crypto: new BrowserCryptoProvider(),
zip: new BrowserZipProvider(),
zipExportProviderFactory: (await import("./lightweight/zip_export_provider_factory.js")).standaloneZipExportProviderFactory,
messaging: messagingProvider!,
request: new FetchRequestProvider(),
platform: new StandalonePlatformProvider(queryString),
translations: translationProvider,
schema: schemaModule.default,
getDemoArchive: async () => {
const response = await fetch("/server-assets/db/demo.zip");
if (!response.ok) return null;
return new Uint8Array(await response.arrayBuffer());
},
dbConfig: {
provider: sqlProvider!,
isReadOnly: false,

View File

@@ -162,6 +162,13 @@ self.addEventListener("fetch", (event) => {
// Only handle same-origin
if (url.origin !== self.location.origin) return;
// API-ish: local-first via bridge (must be checked before navigate handling,
// because export triggers a navigation to an /api/ URL)
if (isLocalFirst(url)) {
event.respondWith(forwardToClientLocalServer(event.request, event.clientId));
return;
}
// HTML files: network-first to ensure updates are reflected immediately
if (event.request.mode === "navigate" || url.pathname.endsWith(".html")) {
event.respondWith(networkFirst(event.request));
@@ -169,17 +176,11 @@ self.addEventListener("fetch", (event) => {
}
// Static assets: cache-first for performance
if (event.request.method === "GET" && !isLocalFirst(url)) {
if (event.request.method === "GET") {
event.respondWith(cacheFirst(event.request));
return;
}
// API-ish: local-first via bridge
if (isLocalFirst(url)) {
event.respondWith(forwardToClientLocalServer(event.request, event.clientId));
return;
}
// Default
event.respondWith(fetch(event.request));
});

View File

@@ -1,12 +0,0 @@
# These are supported funding model platforms
github: shalithasuranga
patreon: shalithasuranga
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

View File

@@ -1,18 +0,0 @@
# Developer tools' files
.lite_workspace.lua
# Neutralinojs binaries and builds
/bin
/dist
# Neutralinojs client (minified)
neutralino.js
# Neutralinojs related files
.storage
*.log
.tmp
# Files managed by the build script
resources
neutralino.config.json

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 Neutralinojs and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,15 +0,0 @@
# neutralinojs-minimal
The default template for a Neutralinojs app. It's possible to use your favorite frontend framework by using [these steps](https://neutralino.js.org/docs/getting-started/using-frontend-libraries).
## Contributors
[![Contributors](https://contrib.rocks/image?repo=neutralinojs/neutralinojs-minimal)](https://github.com/neutralinojs/neutralinojs-minimal/graphs/contributors)
## License
[MIT](LICENSE)
## Icon credits
- `trayIcon.png` - Made by [Freepik](https://www.freepik.com) and downloaded from [Flaticon](https://www.flaticon.com)

View File

@@ -1,83 +0,0 @@
{
"$schema": "https://raw.githubusercontent.com/neutralinojs/neutralinojs/main/schemas/neutralino.config.schema.json",
"applicationId": "js.neutralino.sample",
"version": "1.0.0",
"defaultMode": "window",
"port": 0,
"documentRoot": "/resources/",
"url": "/",
"enableServer": true,
"enableNativeAPI": true,
"tokenSecurity": "one-time",
"logging": {
"enabled": true,
"writeToLogFile": true
},
"nativeAllowList": [
"app.*",
"os.*",
"debug.log"
],
"globalVariables": {
"TEST1": "Hello",
"TEST2": [
2,
4,
5
],
"TEST3": {
"value1": 10,
"value2": {}
}
},
"modes": {
"window": {
"title": "Trilium Notes Lite",
"width": 800,
"height": 500,
"minWidth": 400,
"minHeight": 200,
"center": true,
"fullScreen": false,
"alwaysOnTop": false,
"icon": "/resources/assets/icon.png",
"enableInspector": true,
"borderless": false,
"maximize": false,
"hidden": false,
"resizable": true,
"exitProcessOnClose": true
},
"browser": {
"globalVariables": {
"TEST": "Test value browser"
},
"nativeBlockList": [
"filesystem.*"
]
},
"cloud": {
"url": "/resources/#cloud",
"nativeAllowList": [
"app.*"
]
},
"chrome": {
"width": 800,
"height": 500,
"args": "--user-agent=\"Neutralinojs chrome mode\"",
"nativeBlockList": [
"filesystem.*",
"os.*"
]
}
},
"cli": {
"binaryName": "standalone-desktop",
"resourcesPath": "/resources/",
"extensionsPath": "/extensions/",
"clientLibrary": "/resources/js/neutralino.js",
"binaryVersion": "6.4.0",
"clientVersion": "6.4.0"
}
}

View File

@@ -1,17 +0,0 @@
{
"name": "@triliumnext/standalone-desktop",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"build": "tsx scripts/build.ts",
"dev": "tsx scripts/dev.ts"
},
"keywords": [],
"author": "",
"license": "ISC",
"packageManager": "pnpm@10.28.0",
"devDependencies": {
"@neutralinojs/neu": "11.6.0"
}
}

View File

@@ -1,13 +0,0 @@
import { execSync } from "child_process";
import BuildHelper from "../../../scripts/build-utils";
const build = new BuildHelper("apps/standalone-desktop");
async function main() {
build.triggerBuildAndCopyTo("apps/client-standalone", "../resources");
build.copy("neutralino.template.config.json", "../neutralino.config.json");
execSync("pnpm neu update", { cwd: build.projectDir, stdio: "inherit" });
execSync("pnpm neu build", { cwd: build.projectDir, stdio: "inherit" });
}
main();

View File

@@ -1,18 +0,0 @@
import { execSync } from "child_process";
import { readFileSync, writeFileSync } from "fs";
function main() {
patchTemplate();
console.warn("Make sure to run the Vite dev server in a separate terminal:");
console.warn(" cd apps/client-standalone");
console.warn(" pnpm dev");
execSync("pnpm neu run", { stdio: "inherit" });
}
function patchTemplate() {
const template = JSON.parse(readFileSync("neutralino.template.config.json", "utf-8"));
template.url = "http://localhost:5173/";
writeFileSync("neutralino.config.json", JSON.stringify(template, null, 2), "utf-8");
}
main();

View File

@@ -1,6 +1,7 @@
import { getLog, initializeCore, sql_init } from "@triliumnext/core";
import ClsHookedExecutionContext from "@triliumnext/server/src/cls_provider.js";
import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js";
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
import dataDirs from "@triliumnext/server/src/services/data_dir.js";
import options from "@triliumnext/server/src/services/options.js";
import port from "@triliumnext/server/src/services/port.js";
@@ -133,12 +134,15 @@ async function main() {
}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
zipExportProviderFactory: (await import("@triliumnext/server/src/services/export/zip/factory.js")).serverZipExportProviderFactory,
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new DesktopPlatformProvider(),
translations: (await import("@triliumnext/server/src/services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -54,8 +54,8 @@ async function registerHandlers() {
}
async function exportData() {
const { exportToZipFile } = (await import("@triliumnext/server/src/services/export/zip.js")).default;
await exportToZipFile("root", "html", DEMO_ZIP_PATH);
const { zipExportService } = (await import("@triliumnext/core"));
await zipExportService.exportToZipFile("root", "html", DEMO_ZIP_PATH);
}
main();

View File

@@ -1,7 +1,6 @@
import debounce from "@triliumnext/client/src/services/debounce.js";
import type { AdvancedExportOptions, ExportFormat } from "@triliumnext/core";
import cls from "@triliumnext/server/src/services/cls.js";
import type { AdvancedExportOptions, ExportFormat } from "@triliumnext/server/src/services/export/zip/abstract_provider.js";
import { initializeTranslations } from "@triliumnext/server/src/services/i18n.js";
import { parseNoteMetaFile } from "@triliumnext/server/src/services/in_app_help.js";
import type { NoteMetaFile } from "@triliumnext/server/src/services/meta/note_meta.js";
import type NoteMeta from "@triliumnext/server/src/services/meta/note_meta.js";
@@ -11,7 +10,7 @@ import yaml from "js-yaml";
import path from "path";
import packageJson from "../package.json" with { type: "json" };
import { extractZip, importData, initializeDatabase, startElectron } from "./utils.js";
import { extractZip, importData, startElectron } from "./utils.js";
interface NoteMapping {
rootNoteId: string;
@@ -153,7 +152,7 @@ async function exportData(noteId: string, format: ExportFormat, outputPath: stri
await fsExtra.mkdir(outputPath);
// First export as zip.
const { exportToZipFile } = (await import("@triliumnext/server/src/services/export/zip.js")).default;
const { zipExportService } = (await import("@triliumnext/core"));
const exportOpts: AdvancedExportOptions = {};
if (format === "html") {
@@ -205,7 +204,7 @@ async function exportData(noteId: string, format: ExportFormat, outputPath: stri
};
}
await exportToZipFile(noteId, format, zipFilePath, exportOpts);
await zipExportService.exportToZipFile(noteId, format, zipFilePath, exportOpts);
await extractZip(zipFilePath, outputPath, ignoredFiles);
} finally {
if (await fsExtra.exists(zipFilePath)) {

View File

@@ -62,7 +62,7 @@ export function startElectron(callback: () => void): DeferredPromise<void> {
export async function importData(path: string) {
const buffer = await createImportZip(path);
const importService = (await import("@triliumnext/server/src/services/import/zip.js")).default;
const { zipImportService } = (await import("@triliumnext/core"));
const context = new TaskContext("no-progress-reporting", "importNotes", null);
const becca = (await import("@triliumnext/server/src/becca/becca.js")).default;
@@ -70,7 +70,7 @@ export async function importData(path: string) {
if (!rootNote) {
throw new Error("Missing root note for import.");
}
await importService.importZip(context, buffer, rootNote, {
await zipImportService.importZip(context, buffer, rootNote, {
preserveIds: true
});
}
@@ -106,19 +106,18 @@ function waitForEnd(archive: Archiver, stream: WriteStream) {
export async function extractZip(zipFilePath: string, outputPath: string, ignoredFiles?: Set<string>) {
const promise = deferred<void>();
setTimeout(async () => {
// Then extract the zip.
const { readZipFile, readContent } = (await import("@triliumnext/server/src/services/import/zip.js"));
await readZipFile(await fs.readFile(zipFilePath), async (zip, entry) => {
const { getZipProvider } = (await import("@triliumnext/core"));
const zipProvider = getZipProvider();
const buffer = await fs.readFile(zipFilePath);
await zipProvider.readZipFile(buffer, async (entry, readContent) => {
// We ignore directories since they can appear out of order anyway.
if (!entry.fileName.endsWith("/") && !ignoredFiles?.has(entry.fileName)) {
const destPath = path.join(outputPath, entry.fileName);
const fileContent = await readContent(zip, entry);
const fileContent = await readContent();
await fsExtra.mkdirs(path.dirname(destPath));
await fs.writeFile(destPath, fileContent);
}
zip.readEntry();
});
promise.resolve();
}, 1000);

View File

@@ -41,7 +41,6 @@
"@triliumnext/core": "workspace:*",
"@triliumnext/express-partial-content": "workspace:*",
"@triliumnext/highlightjs": "workspace:*",
"@triliumnext/turndown-plugin-gfm": "workspace:*",
"@types/archiver": "7.0.0",
"@types/better-sqlite3": "7.6.13",
"@types/cls-hooked": "4.3.9",
@@ -68,7 +67,6 @@
"axios": "1.13.6",
"bindings": "1.5.0",
"bootstrap": "5.3.8",
"chardet": "2.1.1",
"cheerio": "1.2.0",
"chokidar": "5.0.0",
"cls-hooked": "4.2.2",
@@ -108,8 +106,7 @@
"safe-compare": "1.1.4",
"sax": "1.6.0",
"serve-favicon": "2.5.1",
"stream-throttle": "0.1.3",
"strip-bom": "5.0.0",
"stream-throttle": "0.1.3",
"striptags": "3.2.0",
"supertest": "7.2.2",
"swagger-jsdoc": "6.2.8",

View File

@@ -2,8 +2,10 @@ import { beforeAll } from "vitest";
import { readFileSync } from "fs";
import { join } from "path";
import { initializeCore } from "@triliumnext/core";
import { serverZipExportProviderFactory } from "../src/services/export/zip/factory.js";
import ClsHookedExecutionContext from "../src/cls_provider.js";
import NodejsCryptoProvider from "../src/crypto_provider.js";
import NodejsZipProvider from "../src/zip_provider.js";
import ServerPlatformProvider from "../src/platform_provider.js";
import BetterSqlite3Provider from "../src/sql_provider.js";
import { initializeTranslations } from "../src/services/i18n.js";
@@ -27,6 +29,8 @@ beforeAll(async () => {
onTransactionRollback() {}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
zipExportProviderFactory: serverZipExportProviderFactory,
executionContext: new ClsHookedExecutionContext(),
schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new ServerPlatformProvider(),

View File

@@ -6,7 +6,7 @@ const randtoken = generator({ source: "crypto" });
export default class NodejsCryptoProvider implements CryptoProvider {
createHash(algorithm: "sha1", content: string | Uint8Array): Uint8Array {
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
return crypto.createHash(algorithm).update(content).digest();
}

View File

@@ -1,11 +1,8 @@
import { NoteParams, SearchParams } from "@triliumnext/core";
import { type ExportFormat, NoteParams, SearchParams, zipExportService, zipImportService } from "@triliumnext/core";
import type { Request, Router } from "express";
import type { ParsedQs } from "qs";
import becca from "../becca/becca.js";
import zipExportService from "../services/export/zip.js";
import type { ExportFormat } from "../services/export/zip/abstract_provider.js";
import zipImportService from "../services/import/zip.js";
import noteService from "../services/notes.js";
import SearchContext from "../services/search/search_context.js";
import searchService from "../services/search/services/search.js";

View File

@@ -3,13 +3,14 @@
* are loaded later and will result in an empty string.
*/
import { getLog,initializeCore, sql_init } from "@triliumnext/core";
import { getLog, initializeCore, sql_init } from "@triliumnext/core";
import fs from "fs";
import { t } from "i18next";
import path from "path";
import ClsHookedExecutionContext from "./cls_provider.js";
import NodejsCryptoProvider from "./crypto_provider.js";
import NodejsZipProvider from "./zip_provider.js";
import ServerPlatformProvider from "./platform_provider.js";
import dataDirs from "./services/data_dir.js";
import port from "./services/port.js";
@@ -51,12 +52,15 @@ async function startApplication() {
}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
zipExportProviderFactory: (await import("./services/export/zip/factory.js")).serverZipExportProviderFactory,
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new ServerPlatformProvider(),
translations: (await import("./services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -1,30 +0,0 @@
import type { Request } from "express";
import markdownService from "../../services/import/markdown.js";
import markdown from "../../services/export/markdown.js";
import { RenderMarkdownResponse, ToMarkdownResponse } from "@triliumnext/commons";
function renderMarkdown(req: Request) {
const { markdownContent } = req.body;
if (!markdownContent || typeof markdownContent !== 'string') {
throw new Error('markdownContent parameter is required and must be a string');
}
return {
htmlContent: markdownService.renderToHtml(markdownContent, "")
} satisfies RenderMarkdownResponse;
}
/**
 * Converts the HTML string supplied in the request body back to Markdown.
 *
 * Expects `req.body.htmlContent` to be a non-empty string; throws otherwise.
 */
function toMarkdown(req: Request) {
    const htmlContent: unknown = req.body.htmlContent;
    // Reject non-strings and the empty string alike (matches the required contract).
    if (typeof htmlContent !== "string" || htmlContent.length === 0) {
        throw new Error('htmlContent parameter is required and must be a string');
    }
    const markdownContent = markdown.toMarkdown(htmlContent);
    return { markdownContent } satisfies ToMarkdownResponse;
}
// Route handlers for the Markdown conversion endpoints (Markdown→HTML and HTML→Markdown).
export default {
    renderMarkdown,
    toMarkdown
};

View File

@@ -22,13 +22,10 @@ import backendLogRoute from "./api/backend_log.js";
import clipperRoute from "./api/clipper.js";
import databaseRoute from "./api/database.js";
import etapiTokensApiRoutes from "./api/etapi_tokens.js";
import exportRoute from "./api/export.js";
import filesRoute from "./api/files.js";
import fontsRoute from "./api/fonts.js";
import importRoute from "./api/import.js";
import loginApiRoute from "./api/login.js";
import metricsRoute from "./api/metrics.js";
import otherRoute from "./api/other.js";
import passwordApiRoute from "./api/password.js";
import recoveryCodes from './api/recovery_codes.js';
import scriptRoute from "./api/script.js";
@@ -89,7 +86,9 @@ function register(app: express.Application) {
checkApiAuthOrElectron: auth.checkApiAuthOrElectron,
checkAppNotInitialized: auth.checkAppNotInitialized,
checkCredentials: auth.checkCredentials,
loginRateLimiter
loginRateLimiter,
uploadMiddlewareWithErrorHandling,
csrfMiddleware
});
route(PUT, "/api/notes/:noteId/file", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], filesRoute.updateFile, apiResultHandler);
@@ -129,12 +128,6 @@ function register(app: express.Application) {
// TODO: Re-enable once we support route()
// route(GET, "/api/revisions/:revisionId/download", [auth.checkApiAuthOrElectron], revisionsApiRoute.downloadRevision);
route(GET, "/api/branches/:branchId/export/:type/:format/:version/:taskId", [auth.checkApiAuthOrElectron], exportRoute.exportBranch);
asyncRoute(PST, "/api/notes/:parentNoteId/notes-import", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importNotesToBranch, apiResultHandler);
route(PST, "/api/notes/:parentNoteId/attachments-import", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importAttachmentsToNote, apiResultHandler);
// :filename is not used by trilium, but instead used for "save as" to assign a human-readable filename
apiRoute(PST, "/api/password/change", passwordApiRoute.changePassword);
apiRoute(PST, "/api/password/reset", passwordApiRoute.resetPassword);
@@ -199,8 +192,6 @@ function register(app: express.Application) {
asyncApiRoute(GET, "/api/backend-log", backendLogRoute.getBackendLog);
route(GET, "/api/fonts", [auth.checkApiAuthOrElectron], fontsRoute.getFontCss);
apiRoute(PST, "/api/other/render-markdown", otherRoute.renderMarkdown);
apiRoute(PST, "/api/other/to-markdown", otherRoute.toMarkdown);
shareRoutes.register(router);

View File

@@ -1,5 +1,5 @@
import { type AttributeRow, dayjs, formatLogMessage } from "@triliumnext/commons";
import { type AbstractBeccaEntity, Becca, branches as branchService, NoteParams, SearchContext, sync_mutex as syncMutex } from "@triliumnext/core";
import { type AbstractBeccaEntity, Becca, branches as branchService, NoteParams, SearchContext, sync_mutex as syncMutex, zipExportService } from "@triliumnext/core";
import axios from "axios";
import * as cheerio from "cheerio";
import xml2js from "xml2js";
@@ -19,7 +19,6 @@ import backupService from "./backup.js";
import cloningService from "./cloning.js";
import config from "./config.js";
import dateNoteService from "./date_notes.js";
import exportService from "./export/zip.js";
import log from "./log.js";
import noteService from "./notes.js";
import optionsService from "./options.js";
@@ -662,7 +661,7 @@ function BackendScriptApi(this: Api, currentNote: BNote, apiParams: ApiParams) {
return { note: launcherNote };
};
this.exportSubtreeToZipFile = async (noteId, format, zipFilePath) => await exportService.exportToZipFile(noteId, format, zipFilePath);
this.exportSubtreeToZipFile = async (noteId, format, zipFilePath) => await zipExportService.exportToZipFile(noteId, format, zipFilePath);
this.runOnFrontend = async (_script, params = []) => {
let script: string;

View File

@@ -46,10 +46,6 @@ function putEntityChange(entityChange: EntityChange) {
cls.putEntityChange(entityChange);
}
function ignoreEntityChangeIds() {
cls.getContext().set("ignoreEntityChangeIds", true);
}
function get(key: string) {
return cls.getContext().get(key);
}
@@ -62,7 +58,10 @@ function reset() {
cls.getContext().reset();
}
/** @deprecated */
export const wrap = cls.wrap;
/** @deprecated */
export const ignoreEntityChangeIds = cls.ignoreEntityChangeIds;
export default {
init,

View File

@@ -0,0 +1,31 @@
import { type ExportFormat, ZipExportProvider, type ZipExportProviderData } from "@triliumnext/core";
import fs from "fs";
import path from "path";
import { getResourceDir, isDev } from "../../utils.js";
/**
 * Reads the CKEditor 5 content stylesheet that is bundled into HTML exports.
 *
 * In dev mode the file is resolved out of `node_modules` relative to the
 * compiled module location (assumes the build output depth matches the
 * relative path below — confirm if the build layout changes); in production
 * it is taken from the application resource directory.
 *
 * @returns the stylesheet contents as a UTF-8 string.
 */
function readContentCss(): string {
    let cssFile: string;
    if (isDev) {
        cssFile = path.join(__dirname, "../../../../../../node_modules/ckeditor5/dist/ckeditor5-content.css");
    } else {
        cssFile = path.join(getResourceDir(), "ckeditor5-content.css");
    }
    return fs.readFileSync(cssFile, "utf-8");
}
/**
 * Builds the server-side ZIP export provider for the requested format.
 *
 * Providers are loaded lazily via dynamic `import()` so that a format's
 * dependencies are only pulled in when that format is actually exported.
 * Only the HTML provider receives the CKEditor content CSS.
 *
 * @param format the export format requested by the client.
 * @param data shared state handed to every provider.
 * @returns a ready-to-use provider instance.
 * @throws Error when the format is not one of "html", "markdown" or "share".
 */
export async function serverZipExportProviderFactory(format: ExportFormat, data: ZipExportProviderData): Promise<ZipExportProvider> {
    if (format === "html") {
        const { default: HtmlExportProvider } = await import("@triliumnext/core/src/services/export/zip/html.js");
        return new HtmlExportProvider(data, { contentCss: readContentCss() });
    }

    if (format === "markdown") {
        const { default: MarkdownExportProvider } = await import("@triliumnext/core/src/services/export/zip/markdown.js");
        return new MarkdownExportProvider(data);
    }

    if (format === "share") {
        const { default: ShareThemeExportProvider } = await import("./share_theme.js");
        return new ShareThemeExportProvider(data);
    }

    throw new Error(`Unsupported export format: '${format}'`);
}

View File

@@ -1,3 +1,4 @@
import { ExportFormat, icon_packs as iconPackService, ZipExportProvider } from "@triliumnext/core";
import ejs from "ejs";
import fs, { readdirSync, readFileSync } from "fs";
import { convert as convertToText } from "html-to-text";
@@ -9,12 +10,10 @@ import type BBranch from "../../../becca/entities/bbranch.js";
import type BNote from "../../../becca/entities/bnote.js";
import { getClientDir, getShareThemeAssetDir } from "../../../routes/assets";
import { getDefaultTemplatePath, readTemplate, renderNoteForExport } from "../../../share/content_renderer";
import { icon_packs as iconPackService } from "@triliumnext/core";
import log from "../../log";
import NoteMeta, { NoteMetaFile } from "../../meta/note_meta";
import { RESOURCE_DIR } from "../../resource_dir";
import { getResourceDir, isDev } from "../../utils";
import { ExportFormat, ZipExportProvider } from "./abstract_provider.js";
const shareThemeAssetDir = getShareThemeAssetDir();

View File

@@ -1,9 +1,7 @@
import { getCrypto,utils as coreUtils } from "@triliumnext/core";
import chardet from "chardet";
import { binary_utils,getCrypto, utils as coreUtils } from "@triliumnext/core";
import crypto from "crypto";
import { release as osRelease } from "os";
import path from "path";
import stripBom from "strip-bom";
const osVersion = osRelease().split('.').map(Number);
@@ -15,6 +13,7 @@ export const isWindows11 = isWindows && osVersion[0] === 10 && osVersion[2] >= 2
export const isElectron = !!process.versions["electron"];
/** @deprecated Use `isDev()` from `@triliumnext/core` instead. */
export const isDev = !!(process.env.TRILIUM_ENV && process.env.TRILIUM_ENV === "dev");
/** @deprecated */
@@ -27,10 +26,6 @@ export function randomString(length: number): string {
return coreUtils.randomString(length);
}
export function md5(content: crypto.BinaryLike) {
return crypto.createHash("md5").update(content).digest("hex");
}
/** @deprecated */
export function hashedBlobId(content: string | Buffer) {
return coreUtils.hashedBlobId(content);
@@ -138,35 +133,6 @@ export function getResourceDir() {
return path.join(__dirname, "..");
}
/**
* For buffers, they are scanned for a supported encoding and decoded (UTF-8, UTF-16). In some cases, the BOM is also stripped.
*
* For strings, they are returned immediately without any transformation.
*
* For nullish values, an empty string is returned.
*
* @param data the string or buffer to process.
* @returns the string representation of the buffer, or the same string if it's a string.
*/
/**
 * Normalizes mixed string/buffer note content to a string.
 *
 * Buffers are decoded according to their detected encoding: UTF-16LE content
 * is decoded and BOM-stripped, everything else (including detection misses)
 * is treated as UTF-8. Strings pass through untouched and nullish input
 * yields the empty string.
 *
 * @param data the string or buffer to process.
 * @returns the decoded text.
 */
export function processStringOrBuffer(data: string | Buffer | null) {
    if (!data) {
        return "";
    }
    if (!Buffer.isBuffer(data)) {
        return data;
    }
    // chardet only influences the UTF-16LE branch; any other result falls back to UTF-8.
    if (chardet.detect(data) === "UTF-16LE") {
        return stripBom(data.toString("utf-16le"));
    }
    return data.toString("utf-8");
}
/** @deprecated */
export const escapeHtml = coreUtils.escapeHtml;
/** @deprecated */
@@ -183,6 +149,7 @@ export const isEmptyOrWhitespace = coreUtils.isEmptyOrWhitespace;
export const normalizeUrl = coreUtils.normalizeUrl;
export const timeLimit = coreUtils.timeLimit;
export const sanitizeSqlIdentifier = coreUtils.sanitizeSqlIdentifier;
export const processStringOrBuffer = binary_utils.processStringOrBuffer;
export function waitForStreamToFinish(stream: any): Promise<void> {
return new Promise((resolve, reject) => {
@@ -207,7 +174,6 @@ export default {
isMac,
isStringNote,
isWindows,
md5,
newEntityId,
normalize,
quoteRegex,

View File

@@ -1,6 +1,5 @@
import { renderSpreadsheetToHtml } from "@triliumnext/commons";
import { sanitize } from "@triliumnext/core";
import { icon_packs as iconPackService } from "@triliumnext/core";
import { icon_packs as iconPackService, sanitize, utils } from "@triliumnext/core";
import { highlightAuto } from "@triliumnext/highlightjs";
import ejs from "ejs";
import escapeHtml from "escape-html";
@@ -16,7 +15,7 @@ import BNote from "../becca/entities/bnote.js";
import assetPath, { assetUrlFragment } from "../services/asset_path.js";
import log from "../services/log.js";
import options from "../services/options.js";
import utils, { getResourceDir, isDev, safeExtractMessageAndStackFromError } from "../services/utils.js";
import { getResourceDir, isDev } from "../services/utils.js";
import SAttachment from "./shaca/entities/sattachment.js";
import SBranch from "./shaca/entities/sbranch.js";
import type SNote from "./shaca/entities/snote.js";
@@ -224,7 +223,7 @@ function renderNoteContentInternal(note: SNote | BNote, renderArgs: RenderArgs)
return ejs.render(content, opts, { includer });
}
} catch (e: unknown) {
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
const [errMessage, errStack] = utils.safeExtractMessageAndStackFromError(e);
log.error(`Rendering user provided share template (${templateId}) threw exception ${errMessage} with stacktrace: ${errStack}`);
}
}

View File

@@ -0,0 +1,85 @@
import type { FileStream, ZipArchive, ZipEntry, ZipProvider } from "@triliumnext/core/src/services/zip_provider.js";
import archiver, { type Archiver } from "archiver";
import fs from "fs";
import type { Stream } from "stream";
import * as yauzl from "yauzl";
/**
 * Node.js-backed {@link ZipArchive} implementation wrapping the `archiver`
 * library. Accepts platform-agnostic `Uint8Array`/string content and converts
 * it to the `Buffer` form `archiver` expects.
 */
class NodejsZipArchive implements ZipArchive {
    readonly #archive: Archiver;

    constructor() {
        // zlib level 9 = maximum DEFLATE compression.
        this.#archive = archiver("zip", { zlib: { level: 9 } });
    }

    append(content: string | Uint8Array, options: { name: string; date?: Date }) {
        const payload = typeof content === "string" ? content : Buffer.from(content);
        this.#archive.append(payload, options);
    }

    pipe(destination: unknown) {
        this.#archive.pipe(destination as NodeJS.WritableStream);
    }

    finalize(): Promise<void> {
        return this.#archive.finalize();
    }
}
/**
 * Collects every chunk emitted by `stream` and resolves with the concatenated
 * bytes once the stream ends; rejects on the first "error" event.
 */
function streamToBuffer(stream: Stream): Promise<Buffer> {
    return new Promise<Buffer>((resolve, reject) => {
        const parts: Uint8Array[] = [];
        stream.on("data", (part: Uint8Array) => parts.push(part));
        stream.on("end", () => resolve(Buffer.concat(parts)));
        stream.on("error", reject);
    });
}
/**
 * Node.js implementation of the platform-agnostic {@link ZipProvider}:
 * archives are written through `archiver` (see {@link NodejsZipArchive})
 * and read through `yauzl`.
 */
export default class NodejsZipProvider implements ZipProvider {
    // Creates a fresh, empty write-oriented archive.
    createZipArchive(): ZipArchive {
        return new NodejsZipArchive();
    }

    // Opens a write stream to `filePath` and exposes a promise that settles
    // when the stream finishes or errors, so callers can pipe an archive to
    // disk without touching Node APIs directly.
    createFileStream(filePath: string): FileStream {
        const stream = fs.createWriteStream(filePath);
        return {
            destination: stream,
            waitForFinish: () => new Promise((resolve, reject) => {
                stream.on("finish", resolve);
                stream.on("error", reject);
            })
        };
    }

    /**
     * Iterates over every entry of an in-memory ZIP, invoking `processEntry`
     * for each one. `lazyEntries: true` plus the explicit `readEntry()` calls
     * drive the iteration one entry at a time, so entries are processed
     * sequentially. Entry content is only decompressed when the consumer
     * invokes the supplied `readContent` callback.
     *
     * NOTE(review): `validateEntrySizes` is disabled — presumably to tolerate
     * archives with inaccurate size headers; confirm against the importers.
     *
     * @param buffer the raw bytes of the ZIP file.
     * @param processEntry async callback invoked once per entry.
     * @returns a promise that resolves after the last entry, or rejects on
     *          the first read/processing error.
     */
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void> {
        return new Promise<void>((res, rej) => {
            yauzl.fromBuffer(Buffer.from(buffer), { lazyEntries: true, validateEntrySizes: false }, (err, zipfile) => {
                if (err) { rej(err); return; }
                if (!zipfile) { rej(new Error("Unable to read zip file.")); return; }
                // Kick off iteration; each handled entry requests the next one below.
                zipfile.readEntry();
                zipfile.on("entry", async (entry: yauzl.Entry) => {
                    try {
                        // Lazily opens the entry's read stream and buffers it fully.
                        const readContent = () => new Promise<Uint8Array>((res, rej) => {
                            zipfile.openReadStream(entry, (err, readStream) => {
                                if (err) { rej(err); return; }
                                if (!readStream) { rej(new Error("Unable to read content.")); return; }
                                streamToBuffer(readStream).then(res, rej);
                            });
                        });
                        await processEntry({ fileName: entry.fileName }, readContent);
                    } catch (e) {
                        rej(e);
                    }
                    // Advance to the next entry. NOTE(review): this also runs after a
                    // rejection above, so remaining entries are still walked even
                    // though the outer promise has already settled — confirm intended.
                    zipfile.readEntry();
                });
                zipfile.on("end", res);
                zipfile.on("error", rej);
            });
        });
    }
}

View File

@@ -9,13 +9,16 @@
"dependencies": {
"@braintree/sanitize-url": "7.1.1",
"@triliumnext/commons": "workspace:*",
"@triliumnext/turndown-plugin-gfm": "workspace:*",
"async-mutex": "0.5.0",
"chardet": "2.1.1",
"escape-html": "1.0.3",
"i18next": "25.10.10",
"mime-types": "3.0.2",
"node-html-parser": "7.1.0",
"sanitize-filename": "1.6.4",
"sanitize-html": "2.17.2",
"strip-bom": "5.0.0",
"unescape": "1.0.1"
},
"devDependencies": {

View File

@@ -6,9 +6,12 @@ import { SqlService, SqlServiceParams } from "./services/sql/sql";
import { initMessaging, MessagingProvider } from "./services/messaging/index";
import { initRequest, RequestProvider } from "./services/request";
import { initTranslations, TranslationProvider } from "./services/i18n";
import { initSchema } from "./services/sql_init";
import { initSchema, initDemoArchive } from "./services/sql_init";
import appInfo from "./services/app_info";
import { type PlatformProvider, initPlatform } from "./services/platform";
import { type ZipProvider, initZipProvider } from "./services/zip_provider";
import { type ZipExportProviderFactory, initZipExportProviderFactory } from "./services/export/zip_export_provider_factory";
import markdown from "./services/import/markdown";
export { getLog } from "./services/log";
export type * from "./services/sql/types";
@@ -99,18 +102,31 @@ export type { RequestProvider, ExecOpts, CookieJar } from "./services/request";
export type * from "./meta";
export * as routeHelpers from "./routes/helpers";
export { getZipProvider, type ZipArchive, type ZipProvider } from "./services/zip_provider";
export { default as zipImportService } from "./services/import/zip";
export { default as zipExportService } from "./services/export/zip";
export { type AdvancedExportOptions, type ZipExportProviderData } from "./services/export/zip/abstract_provider";
export { ZipExportProvider } from "./services/export/zip/abstract_provider";
export { type ZipExportProviderFactory } from "./services/export/zip_export_provider_factory";
export { type ExportFormat } from "./meta";
export * as becca_easy_mocking from "./test/becca_easy_mocking";
export * as becca_mocking from "./test/becca_mocking";
export async function initializeCore({ dbConfig, executionContext, crypto, translations, messaging, request, schema, extraAppInfo, platform }: {
export { default as markdownImportService } from "./services/import/markdown";
export async function initializeCore({ dbConfig, executionContext, crypto, zip, zipExportProviderFactory, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive }: {
dbConfig: SqlServiceParams,
executionContext: ExecutionContext,
crypto: CryptoProvider,
zip: ZipProvider,
translations: TranslationProvider,
platform: PlatformProvider,
schema: string,
zipExportProviderFactory: ZipExportProviderFactory,
messaging?: MessagingProvider,
request?: RequestProvider,
getDemoArchive?: () => Promise<Uint8Array | null>,
extraAppInfo?: {
nodeVersion: string;
dataDirectory: string;
@@ -120,9 +136,14 @@ export async function initializeCore({ dbConfig, executionContext, crypto, trans
initLog();
await initTranslations(translations);
initCrypto(crypto);
initZipProvider(zip);
initZipExportProviderFactory(zipExportProviderFactory);
initContext(executionContext);
initSql(new SqlService(dbConfig, getLog()));
initSchema(schema);
if (getDemoArchive) {
initDemoArchive(getDemoArchive);
}
Object.assign(appInfo, extraAppInfo);
if (messaging) {
initMessaging(messaging);

View File

@@ -1,21 +1,21 @@
import { NotFoundError, ValidationError } from "@triliumnext/core";
import type { Request, Response } from "express";
import becca from "../../becca/becca.js";
import opmlExportService from "../../services/export/opml.js";
import singleExportService from "../../services/export/single.js";
import zipExportService from "../../services/export/zip.js";
import log from "../../services/log.js";
import { getLog } from "../../services/log.js";
import TaskContext from "../../services/task_context.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils/index.js";
import { NotFoundError, ValidationError } from "../../errors.js";
function exportBranch(req: Request<{ branchId: string; type: string; format: string; version: string; taskId: string }>, res: Response) {
async function exportBranch(req: Request<{ branchId: string; type: string; format: string; version: string; taskId: string }>, res: Response) {
const { branchId, type, format, version, taskId } = req.params;
const branch = becca.getBranch(branchId);
if (!branch) {
const message = `Cannot export branch '${branchId}' since it does not exist.`;
log.error(message);
getLog().error(message);
res.setHeader("Content-Type", "text/plain").status(500).send(message);
return;
@@ -25,7 +25,7 @@ function exportBranch(req: Request<{ branchId: string; type: string; format: str
try {
if (type === "subtree" && (format === "html" || format === "markdown" || format === "share")) {
zipExportService.exportToZip(taskContext, branch, format, res);
await zipExportService.exportToZip(taskContext, branch, format, res);
} else if (type === "single") {
if (format !== "html" && format !== "markdown") {
throw new ValidationError("Invalid export type.");
@@ -41,7 +41,7 @@ function exportBranch(req: Request<{ branchId: string; type: string; format: str
const message = `Export failed with following error: '${errMessage}'. More details might be in the logs.`;
taskContext.reportError(message);
log.error(errMessage + errStack);
getLog().error(errMessage + errStack);
res.setHeader("Content-Type", "text/plain").status(500).send(message);
}

View File

@@ -1,19 +1,23 @@
import { becca_loader, ValidationError } from "@triliumnext/core";
import type { Request } from "express";
import path from "path";
import type { File } from "../../services/import/common.js";
type ImportRequest<P> = Omit<Request<P>, "file"> & { file?: File };
import becca from "../../becca/becca.js";
import type BNote from "../../becca/entities/bnote.js";
import cls from "../../services/cls.js";
import enexImportService from "../../services/import/enex.js";
// import enexImportService from "../../services/import/enex.js";
import opmlImportService from "../../services/import/opml.js";
import singleImportService from "../../services/import/single.js";
import zipImportService from "../../services/import/zip.js";
import log from "../../services/log.js";
import { getLog } from "../../services/log.js";
import TaskContext from "../../services/task_context.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils/index.js";
import * as cls from "../../services/context.js";
import { ValidationError } from "../../errors.js";
import becca_loader from "../../becca/becca_loader.js";
import { extname } from "../../services/utils/path.js";
async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
async function importNotesToBranch(req: ImportRequest<{ parentNoteId: string }>) {
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
@@ -34,7 +38,7 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
const parentNote = becca.getNoteOrThrow(parentNoteId);
const extension = path.extname(file.originalname).toLowerCase();
const extension = extname(file.originalname).toLowerCase();
// running all the event handlers on imported notes (and attributes) is slow
// and may produce unintended consequences
@@ -58,21 +62,22 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
return importResult;
}
} else if (extension === ".enex" && options.explodeArchives) {
const importResult = await enexImportService.importEnex(taskContext, file, parentNote);
if (!Array.isArray(importResult)) {
note = importResult;
} else {
return importResult;
}
throw "ENEX import is currently not supported. Please use the desktop app to import ENEX files and then sync with the server.";
// const importResult = await enexImportService.importEnex(taskContext, file, parentNote);
// if (!Array.isArray(importResult)) {
// note = importResult;
// } else {
// return importResult;
// }
} else {
note = await singleImportService.importSingleFile(taskContext, file, parentNote);
note = singleImportService.importSingleFile(taskContext, file, parentNote);
}
} catch (e: unknown) {
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
taskContext.reportError(message);
log.error(message + errStack);
getLog().error(message + errStack);
return [500, message];
}
@@ -99,7 +104,7 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
return note.getPojo();
}
function importAttachmentsToNote(req: Request<{ parentNoteId: string }>) {
function importAttachmentsToNote(req: ImportRequest<{ parentNoteId: string }>) {
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
@@ -126,7 +131,7 @@ function importAttachmentsToNote(req: Request<{ parentNoteId: string }>) {
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
taskContext.reportError(message);
log.error(message + errStack);
getLog().error(message + errStack);
return [500, message];
}

View File

@@ -1,5 +1,31 @@
import becca from "../../becca/becca";
import { RenderMarkdownResponse, ToMarkdownResponse } from "@triliumnext/commons";
import type { Request } from "express";
import markdown from "../../services/export/markdown.js";
import { markdownImportService } from "../..";
/**
 * Renders the Markdown string supplied in the request body to HTML using the
 * core Markdown import service.
 *
 * Expects `req.body.markdownContent` to be a non-empty string; throws otherwise.
 */
function renderMarkdown(req: Request) {
    const markdownContent: unknown = req.body.markdownContent;
    // Reject non-strings and the empty string alike (matches the required contract).
    if (typeof markdownContent !== "string" || markdownContent.length === 0) {
        throw new Error('markdownContent parameter is required and must be a string');
    }
    const htmlContent = markdownImportService.renderToHtml(markdownContent, "");
    return { htmlContent } satisfies RenderMarkdownResponse;
}
/**
 * Converts the HTML string supplied in the request body back to Markdown.
 *
 * Expects `req.body.htmlContent` to be a non-empty string; throws otherwise.
 */
function toMarkdown(req: Request) {
    const htmlContent: unknown = req.body.htmlContent;
    // Reject non-strings and the empty string alike (matches the required contract).
    if (typeof htmlContent !== "string" || htmlContent.length === 0) {
        throw new Error('htmlContent parameter is required and must be a string');
    }
    const markdownContent = markdown.toMarkdown(htmlContent);
    return { markdownContent } satisfies ToMarkdownResponse;
}
function getIconUsage() {
const iconClassToCountMap: Record<string, number> = {};
@@ -25,5 +51,7 @@ function getIconUsage() {
}
export default {
getIconUsage
getIconUsage,
renderMarkdown,
toMarkdown
}

View File

@@ -1,6 +1,5 @@
import { EditedNotesResponse, RevisionItem, RevisionPojo } from "@triliumnext/commons";
import type { Request, Response } from "express";
import path from "path";
import becca from "../../becca/becca.js";
import type BNote from "../../becca/entities/bnote.js";
@@ -10,6 +9,7 @@ import eraseService from "../../services/erase.js";
import { NotePojo } from "../../becca/becca-interface.js";
import { becca_service, binary_utils, cls, getSql } from "../../index.js";
import { formatDownloadTitle, getContentDisposition } from "../../services/utils/index.js";
import { extname } from "../../services/utils/path.js";
interface NotePath {
noteId: string;
@@ -67,7 +67,7 @@ function getRevisionFilename(revision: BRevision) {
throw new Error("Missing creation date for revision.");
}
const extension = path.extname(filename);
const extension = extname(filename);
const date = revision.dateCreated
.substr(0, 19)
.replace(" ", "_")

View File

@@ -15,7 +15,7 @@ function getStatus() {
async function setupNewDocument(req: Request) {
const { skipDemoDb } = req.query;
await sqlInit.createInitialDatabase(!!skipDemoDb);
await sqlInit.createInitialDatabase(skipDemoDb !== undefined);
}
function setupSyncFromServer(req: Request): Promise<SetupSyncFromServerResponse> {

View File

@@ -25,6 +25,8 @@ import similarNotesRoute from "./api/similar_notes";
import imageRoute from "./api/image";
import setupApiRoute from "./api/setup";
import filesRoute from "./api/files";
import importRoute from "./api/import";
import exportRoute from "./api/export";
// TODO: Deduplicate with routes.ts
const GET = "get",
@@ -44,9 +46,11 @@ interface SharedApiRoutesContext {
checkAppNotInitialized: any;
loginRateLimiter: any;
checkCredentials: any;
uploadMiddlewareWithErrorHandling: any;
csrfMiddleware: any;
}
export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRoute, checkApiAuth, apiResultHandler, checkApiAuthOrElectron, checkAppNotInitialized, checkCredentials, loginRateLimiter }: SharedApiRoutesContext) {
export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRoute, checkApiAuth, apiResultHandler, checkApiAuthOrElectron, checkAppNotInitialized, checkCredentials, loginRateLimiter, uploadMiddlewareWithErrorHandling, csrfMiddleware }: SharedApiRoutesContext) {
apiRoute(GET, '/api/tree', treeApiRoute.getTree);
apiRoute(PST, '/api/tree/load', treeApiRoute.load);
@@ -113,6 +117,7 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
apiRoute(PUT, "/api/branches/:branchId/set-prefix", branchesApiRoute.setPrefix);
apiRoute(PUT, "/api/branches/set-prefix-batch", branchesApiRoute.setPrefixBatch);
// :filename is not used by trilium, but instead used for "save as" to assign a human-readable filename
route(GET, "/api/revisions/:revisionId/image/:filename", [checkApiAuthOrElectron], imageRoute.returnImageFromRevision);
route(GET, "/api/attachments/:attachmentId/image/:filename", [checkApiAuthOrElectron], imageRoute.returnAttachedImage);
route(GET, "/api/images/:noteId/:filename", [checkApiAuthOrElectron], imageRoute.returnImageFromNote);
@@ -136,6 +141,12 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
route(PST, "/api/sync/queue-sector/:entityName/:sector", [checkApiAuth], syncApiRoute.queueSector, apiResultHandler);
route(GET, "/api/sync/stats", [], syncApiRoute.getStats, apiResultHandler);
//#region Import/export
asyncRoute(PST, "/api/notes/:parentNoteId/notes-import", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importNotesToBranch, apiResultHandler);
route(PST, "/api/notes/:parentNoteId/attachments-import", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importAttachmentsToNote, apiResultHandler);
asyncRoute(GET, "/api/branches/:branchId/export/:type/:format/:version/:taskId", [checkApiAuthOrElectron], exportRoute.exportBranch);
//#endregion
apiRoute(GET, "/api/quick-search/:searchString", searchRoute.quickSearch);
apiRoute(GET, "/api/search-note/:noteId", searchRoute.searchFromNote);
apiRoute(PST, "/api/search-and-execute-note/:noteId", searchRoute.searchAndExecute);
@@ -187,7 +198,11 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
apiRoute(PST, "/api/bulk-action/affected-notes", bulkActionRoute.getAffectedNoteCount);
apiRoute(GET, "/api/app-info", appInfoRoute.getAppInfo);
apiRoute(GET, "/api/other/icon-usage", otherRoute.getIconUsage);
apiRoute(PST, "/api/other/render-markdown", otherRoute.renderMarkdown);
apiRoute(PST, "/api/other/to-markdown", otherRoute.toMarkdown);
asyncApiRoute(GET, "/api/similar-notes/:noteId", similarNotesRoute.getSimilarNotes);
apiRoute(PST, "/api/relation-map", relationMapApiRoute.getRelationMap);
apiRoute(GET, "/api/recent-changes/:ancestorNoteId", recentChangesApiRoute.getRecentChanges);

View File

@@ -77,3 +77,7 @@ export function getAndClearEntityChangeIds() {
return entityChangeIds;
}
export function ignoreEntityChangeIds() {
getContext().set("ignoreEntityChangeIds", true);
}

View File

@@ -5,7 +5,7 @@ interface Cipher {
export interface CryptoProvider {
createHash(algorithm: "sha1" | "sha512", content: string | Uint8Array): Uint8Array;
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array;
randomBytes(size: number): Uint8Array;
randomString(length: number): string;
createCipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher;

View File

@@ -1,9 +1,9 @@
import { utils } from "@triliumnext/core";
import type { Response } from "express";
import becca from "../../becca/becca.js";
import type BBranch from "../../becca/entities/bbranch.js";
import type TaskContext from "../task_context.js";
import { getContentDisposition, stripTags } from "../utils/index.js";
function exportToOpml(taskContext: TaskContext<"export">, branch: BBranch, version: string, res: Response) {
if (!["1.0", "2.0"].includes(version)) {
@@ -58,7 +58,7 @@ function exportToOpml(taskContext: TaskContext<"export">, branch: BBranch, versi
const filename = `${branch.prefix ? `${branch.prefix} - ` : ""}${note.title}.opml`;
res.setHeader("Content-Disposition", utils.getContentDisposition(filename));
res.setHeader("Content-Disposition", getContentDisposition(filename));
res.setHeader("Content-Type", "text/x-opml");
res.write(`<?xml version="1.0" encoding="UTF-8"?>
@@ -82,7 +82,7 @@ function exportToOpml(taskContext: TaskContext<"export">, branch: BBranch, versi
function prepareText(text: string) {
const newLines = text.replace(/(<p[^>]*>|<br\s*\/?>)/g, "\n").replace(/&nbsp;/g, " "); // nbsp isn't in XML standard (only HTML)
const stripped = utils.stripTags(newLines);
const stripped = stripTags(newLines);
const escaped = escapeXmlAttribute(stripped);

View File

@@ -8,9 +8,10 @@ import becca from "../../becca/becca.js";
import type BBranch from "../../becca/entities/bbranch.js";
import type BNote from "../../becca/entities/bnote.js";
import type TaskContext from "../task_context.js";
import { escapeHtml,getContentDisposition } from "../utils.js";
import { escapeHtml,getContentDisposition } from "../utils/index.js";
import mdService from "./markdown.js";
import type { ExportFormat } from "./zip/abstract_provider.js";
import { ExportFormat } from "../../meta.js";
import { encodeBase64 } from "../utils/binary.js";
function exportSingleNote(taskContext: TaskContext<"export">, branch: BBranch, format: ExportFormat, res: Response) {
const note = branch.getNote();
@@ -88,11 +89,11 @@ function inlineAttachments(content: string) {
}
const imageContent = note.getContent();
if (!Buffer.isBuffer(imageContent)) {
if (typeof imageContent === "string") {
return match;
}
const base64Content = imageContent.toString("base64");
const base64Content = encodeBase64(imageContent);
const srcValue = `data:${note.mime};base64,${base64Content}`;
return `src="${srcValue}"`;
@@ -105,11 +106,11 @@ function inlineAttachments(content: string) {
}
const attachmentContent = attachment.getContent();
if (!Buffer.isBuffer(attachmentContent)) {
if (typeof attachmentContent === "string") {
return match;
}
const base64Content = attachmentContent.toString("base64");
const base64Content = encodeBase64(attachmentContent);
const srcValue = `data:${attachment.mime};base64,${base64Content}`;
return `src="${srcValue}"`;
@@ -122,11 +123,11 @@ function inlineAttachments(content: string) {
}
const attachmentContent = attachment.getContent();
if (!Buffer.isBuffer(attachmentContent)) {
if (typeof attachmentContent === "string") {
return match;
}
const base64Content = attachmentContent.toString("base64");
const base64Content = encodeBase64(attachmentContent);
const hrefValue = `data:${attachment.mime};base64,${base64Content}`;
return `href="${hrefValue}" download="${escapeHtml(attachment.title)}"`;

View File

@@ -1,43 +1,32 @@
import { NoteType } from "@triliumnext/commons";
import { ValidationError } from "@triliumnext/core";
import archiver from "archiver";
import type { Response } from "express";
import fs from "fs";
import path from "path";
import sanitize from "sanitize-filename";
import packageInfo from "../../../package.json" with { type: "json" };
import becca from "../../becca/becca.js";
import BBranch from "../../becca/entities/bbranch.js";
import type BNote from "../../becca/entities/bnote.js";
import dateUtils from "../date_utils.js";
import log from "../log.js";
import type AttachmentMeta from "../meta/attachment_meta.js";
import type AttributeMeta from "../meta/attribute_meta.js";
import type NoteMeta from "../meta/note_meta.js";
import type { NoteMetaFile } from "../meta/note_meta.js";
import dateUtils from "../utils/date.js";
import { getLog } from "../log.js";
import protectedSessionService from "../protected_session.js";
import TaskContext from "../task_context.js";
import { getContentDisposition, waitForStreamToFinish } from "../utils.js";
import { AdvancedExportOptions, type ExportFormat, ZipExportProviderData } from "./zip/abstract_provider.js";
import HtmlExportProvider from "./zip/html.js";
import MarkdownExportProvider from "./zip/markdown.js";
import ShareThemeExportProvider from "./zip/share_theme.js";
import { getZipProvider } from "../zip_provider.js";
import { getContentDisposition } from "../utils/index"
import { AdvancedExportOptions, ZipExportProviderData } from "./zip/abstract_provider.js";
import { getZipExportProviderFactory } from "./zip_export_provider_factory.js";
import { AttachmentMeta, AttributeMeta, ExportFormat, NoteMeta, NoteMetaFile } from "../../meta";
import { ValidationError } from "../../errors";
import { extname } from "../utils/path";
async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch, format: ExportFormat, res: Response | fs.WriteStream, setHeaders = true, zipExportOptions?: AdvancedExportOptions) {
if (!["html", "markdown", "share"].includes(format)) {
throw new ValidationError(`Only 'html', 'markdown' and 'share' allowed as export format, '${format}' given`);
}
const archive = archiver("zip", {
zlib: { level: 9 } // Sets the compression level.
});
// eslint-disable-next-line @typescript-eslint/no-explicit-any
async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch, format: ExportFormat, res: Record<string, any>, setHeaders = true, zipExportOptions?: AdvancedExportOptions) {
const archive = getZipProvider().createZipArchive();
const rewriteFn = (zipExportOptions?.customRewriteLinks ? zipExportOptions?.customRewriteLinks(rewriteLinks, getNoteTargetUrl) : rewriteLinks);
const provider = buildProvider();
const provider = await buildProvider();
const log = getLog();
const noteIdToMeta: Record<string, NoteMeta> = {};
function buildProvider() {
async function buildProvider() {
const providerData: ZipExportProviderData = {
getNoteTargetUrl,
archive,
@@ -46,16 +35,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
zipExportOptions
};
switch (format) {
case "html":
return new HtmlExportProvider(providerData);
case "markdown":
return new MarkdownExportProvider(providerData);
case "share":
return new ShareThemeExportProvider(providerData);
default:
throw new Error();
}
return getZipExportProviderFactory()(format, providerData);
}
function getUniqueFilename(existingFileNames: Record<string, number>, fileName: string) {
@@ -96,7 +76,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
fileName = fileName.slice(0, 30 - croppedExt.length) + croppedExt;
}
const existingExtension = path.extname(fileName).toLowerCase();
const existingExtension = extname(fileName).toLowerCase();
const newExtension = provider.mapExtension(type, mime, existingExtension, format);
// if the note is already named with the extension (e.g. "image.jpg"), then it's silly to append the exact same extension again
@@ -343,7 +323,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
content = prepareContent(noteMeta.title, content, noteMeta, undefined);
archive.append(typeof content === "string" ? content : Buffer.from(content), {
archive.append(typeof content === "string" ? content : new Uint8Array(content), {
name: filePathPrefix + noteMeta.dataFileName
});
@@ -361,7 +341,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
if (noteMeta.dataFileName) {
const content = prepareContent(noteMeta.title, note.getContent(), noteMeta, note);
archive.append(content as string | Buffer, {
archive.append(content as string | Uint8Array, {
name: filePathPrefix + noteMeta.dataFileName,
date: dateUtils.parseDateTime(note.utcDateModified)
});
@@ -377,7 +357,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
const attachment = note.getAttachmentById(attachmentMeta.attachmentId);
const content = attachment.getContent();
archive.append(typeof content === "string" ? content : Buffer.from(content), {
archive.append(typeof content === "string" ? content : new Uint8Array(content), {
name: filePathPrefix + attachmentMeta.dataFileName,
date: dateUtils.parseDateTime(note.utcDateModified)
});
@@ -471,7 +451,7 @@ async function exportToZip(taskContext: TaskContext<"export">, branch: BBranch,
async function exportToZipFile(noteId: string, format: ExportFormat, zipFilePath: string, zipExportOptions?: AdvancedExportOptions) {
const fileOutputStream = fs.createWriteStream(zipFilePath);
const { destination, waitForFinish } = getZipProvider().createFileStream(zipFilePath);
const taskContext = new TaskContext("no-progress-reporting", "export", null);
const note = becca.getNote(noteId);
@@ -480,10 +460,10 @@ async function exportToZipFile(noteId: string, format: ExportFormat, zipFilePath
throw new ValidationError(`Note ${noteId} not found.`);
}
await exportToZip(taskContext, note.getParentBranches()[0], format, fileOutputStream, false, zipExportOptions);
await waitForStreamToFinish(fileOutputStream);
await exportToZip(taskContext, note.getParentBranches()[0], format, destination as Record<string, any>, false, zipExportOptions);
await waitForFinish();
log.info(`Exported '${noteId}' with format '${format}' to '${zipFilePath}'`);
getLog().info(`Exported '${noteId}' with format '${format}' to '${zipFilePath}'`);
}
export default {

View File

@@ -1,13 +1,10 @@
import { NoteType } from "@triliumnext/commons";
import { ExportFormat } from "@triliumnext/core";
import { Archiver } from "archiver";
import mimeTypes from "mime-types";
import type BBranch from "../../../becca/entities/bbranch.js";
import type BNote from "../../../becca/entities/bnote.js";
import type { default as NoteMeta, NoteMetaFile } from "../../meta/note_meta.js";
export type { ExportFormat, NoteMeta } from "@triliumnext/core";
import { ExportFormat, NoteMeta, NoteMetaFile } from "../../../meta.js";
import type { ZipArchive } from "../../zip_provider.js";
type RewriteLinksFn = (content: string, noteMeta: NoteMeta) => string;
@@ -32,7 +29,7 @@ export interface AdvancedExportOptions {
export interface ZipExportProviderData {
branch: BBranch;
getNoteTargetUrl: (targetNoteId: string, sourceMeta: NoteMeta) => string | null;
archive: Archiver;
archive: ZipArchive;
zipExportOptions: AdvancedExportOptions | undefined;
rewriteFn: RewriteLinksFn;
}
@@ -40,7 +37,7 @@ export interface ZipExportProviderData {
export abstract class ZipExportProvider {
branch: BBranch;
getNoteTargetUrl: (targetNoteId: string, sourceMeta: NoteMeta) => string | null;
archive: Archiver;
archive: ZipArchive;
zipExportOptions?: AdvancedExportOptions;
rewriteFn: RewriteLinksFn;

View File

@@ -1,16 +1,24 @@
import fs from "fs";
import html from "html";
import path from "path";
import type NoteMeta from "../../meta/note_meta.js";
import { escapeHtml, getResourceDir, isDev } from "../../utils";
import { ZipExportProvider } from "./abstract_provider.js";
import { escapeHtml } from "../../utils/index";
import { ZipExportProvider, ZipExportProviderData } from "./abstract_provider.js";
import { NoteMeta } from "../../../meta";
export interface HtmlExportProviderOptions {
contentCss?: string;
}
export default class HtmlExportProvider extends ZipExportProvider {
private navigationMeta: NoteMeta | null = null;
private indexMeta: NoteMeta | null = null;
private cssMeta: NoteMeta | null = null;
private options: HtmlExportProviderOptions;
constructor(data: ZipExportProviderData, options?: HtmlExportProviderOptions) {
super(data);
this.options = options ?? {};
}
prepareMeta(metaFile) {
if (this.zipExportOptions?.skipExtraFiles) return;
@@ -170,11 +178,9 @@ export default class HtmlExportProvider extends ZipExportProvider {
return;
}
const cssFile = isDev
? path.join(__dirname, "../../../../../../node_modules/ckeditor5/dist/ckeditor5-content.css")
: path.join(getResourceDir(), "ckeditor5-content.css");
const cssContent = fs.readFileSync(cssFile, "utf-8");
this.archive.append(cssContent, { name: cssMeta.dataFileName });
if (this.options.contentCss) {
this.archive.append(this.options.contentCss, { name: cssMeta.dataFileName });
}
}
}

View File

@@ -1,4 +1,4 @@
import NoteMeta from "../../meta/note_meta";
import { NoteMeta } from "../../../meta.js";
import mdService from "../markdown.js";
import { ZipExportProvider } from "./abstract_provider.js";

View File

@@ -0,0 +1,15 @@
import type { ExportFormat } from "../../meta.js";
import type { ZipExportProvider, ZipExportProviderData } from "./zip/abstract_provider.js";
export type ZipExportProviderFactory = (format: ExportFormat, data: ZipExportProviderData) => Promise<ZipExportProvider>;
/**
 * Process-wide registry for the ZIP export provider factory.
 * Platform-specific startup code registers an implementation via
 * initZipExportProviderFactory(); core code retrieves it lazily through
 * getZipExportProviderFactory(). Keeping the factory injectable avoids a
 * hard dependency from core export logic onto platform ZIP providers.
 */
let registeredFactory: ZipExportProviderFactory | null = null;

/** Registers the factory used to build ZIP export providers. */
export function initZipExportProviderFactory(f: ZipExportProviderFactory) {
    registeredFactory = f;
}

/**
 * Returns the previously registered factory.
 * @throws Error when called before initZipExportProviderFactory().
 */
export function getZipExportProviderFactory(): ZipExportProviderFactory {
    if (registeredFactory === null) {
        throw new Error("ZipExportProviderFactory not initialized.");
    }
    return registeredFactory;
}

View File

@@ -1,5 +1,5 @@
export default {
saveImageToAttachment(noteId: string, imageBuffer: Uint8Array, title: string, b1: boolean, b2: boolean) {
saveImageToAttachment(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
console.warn("Image save ignored", noteId, title);
return {
@@ -10,5 +10,13 @@ export default {
// Stub: image updates are unsupported in this build; the request is logged
// and silently dropped. NOTE(review): callers presumably tolerate a no-op
// here — confirm against the real image service's contract.
updateImage(noteId: string, imageBuffer: Uint8Array, title: string) {
console.warn("Image update ignored", noteId, title);
},
// Stub: mirrors the real image service's saveImage signature but performs no
// persistence; it logs the attempt and reports that no note was created.
// b1/b2 are accepted only for signature compatibility — their semantics in
// the real service (presumably shrink/compress flags) are not visible here;
// TODO confirm before relying on them.
saveImage(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
console.warn("Image save ignored", noteId, title);
return {
note: null
};
}
}

View File

@@ -1,5 +1,5 @@
export interface File {
originalname: string;
mimetype: string;
buffer: string | Buffer;
buffer: string | Buffer | Uint8Array;
}

View File

@@ -1,20 +1,22 @@
import type { AttributeType } from "@triliumnext/commons";
import { dayjs } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import sax from "sax";
import stream from "stream";
import { Throttle } from "stream-throttle";
import type BNote from "../../becca/entities/bnote.js";
import date_utils from "../date_utils.js";
import date_utils from "../utils/date.js";
import * as utils from "../utils/index.js";
import imageService from "../image.js";
import log from "../log.js";
import { getLog } from "../log.js";
import noteService from "../notes.js";
import protectedSessionService from "../protected_session.js";
import sql from "../sql.js";
import type TaskContext from "../task_context.js";
import { escapeHtml, fromBase64,md5 } from "../utils.js";
import { escapeHtml, md5 } from "../utils/index.js";
import { decodeBase64 } from "../utils/binary.js";
import type { File } from "./common.js";
import { sanitizeHtml } from "../sanitizer.js";
import { getSql } from "../sql/index.js";
/**
* date format is e.g. 20181121T193703Z or 2013-04-14T16:19:00.000Z (Mac evernote, see #3496)
@@ -38,7 +40,7 @@ interface Attribute {
interface Resource {
title: string;
content?: Buffer | string;
content?: Uint8Array | string;
mime?: string;
attributes: Attribute[];
}
@@ -117,7 +119,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
"\u2611 "
);
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
return content;
}
@@ -138,7 +140,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
saxStream.on("error", (e) => {
// unhandled errors will throw, since this is a proper node event emitter.
log.error(`error when parsing ENEX file: ${e}`);
getLog().error(`error when parsing ENEX file: ${e}`);
// clear the error
(saxStream._parser as any).error = null;
saxStream._parser.resume();
@@ -235,6 +237,8 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
});
const sql = getSql();
function updateDates(note: BNote, utcDateCreated?: string, utcDateModified?: string) {
// it's difficult to force custom dateCreated and dateModified to Note entity, so we do it post-creation with SQL
const dateCreated = formatDateTimeToLocalDbFormat(utcDateCreated, false);
@@ -295,7 +299,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
if (typeof resource.content === "string") {
resource.content = fromBase64(resource.content);
resource.content = decodeBase64(resource.content);
}
const hash = md5(resource.content);
@@ -359,7 +363,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
content += imageLink;
}
} catch (e: any) {
log.error(`error when saving image from ENEX file: ${e.message}`);
getLog().error(`error when saving image from ENEX file: ${e.message}`);
createFileNote();
}
} else {
@@ -367,7 +371,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
}
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
// save updated content with links to files/images
noteEntity.setContent(content);

View File

@@ -1,17 +1,15 @@
import { getMimeTypeFromMarkdownName, MIME_TYPE_AUTO } from "@triliumnext/commons";
import { normalizeMimeTypeForCKEditor } from "@triliumnext/commons";
import { sanitize } from "@triliumnext/core";
import { parse, Renderer, type Tokens,use } from "marked";
import { ADMONITION_TYPE_MAPPINGS } from "../export/markdown.js";
import utils from "../utils.js";
import wikiLinkInternalLink from "./markdown/wikilink_internal_link.js";
import wikiLinkTransclusion from "./markdown/wikilink_transclusion.js";
import importUtils from "./utils.js";
import { escapeHtml } from "../utils/index.js";
import { sanitizeHtml } from "../sanitizer.js";
const escape = utils.escapeHtml;
const escape = escapeHtml;
/**
* Keep renderer code up to date with https://github.com/markedjs/marked/blob/master/src/Renderer.ts.
@@ -151,7 +149,7 @@ function renderToHtml(content: string, title: string) {
// h1 handling needs to come before sanitization
html = importUtils.handleH1(html, title);
html = sanitize.sanitizeHtml(html);
html = sanitizeHtml(html);
// Add a trailing semicolon to CSS styles.
html = html.replaceAll(/(<(img|figure|col).*?style=".*?)"/g, "$1;\"");

View File

@@ -1,8 +1,8 @@
"use strict";
import mimeTypes from "mime-types";
import path from "path";
import { types as extToMime } from "mime-types";
import type { NoteType, TaskData } from "@triliumnext/commons";
import { extname } from "../utils/path";
const CODE_MIME_TYPES = new Set([
"application/json",
@@ -84,10 +84,10 @@ function getMime(fileName: string) {
return "text/x-dockerfile";
}
const ext = path.extname(fileNameLc);
const ext = extname(fileNameLc);
const mimeFromExt = EXTENSION_TO_MIME.get(ext);
return mimeFromExt || mimeTypes.lookup(fileNameLc);
return mimeFromExt || extToMime[ext.slice(1)] || false;
}
function getType(options: TaskData<"importNotes">, mime: string): NoteType {

View File

@@ -1,12 +1,10 @@
import { sanitize } from "@triliumnext/core";
import xml2js from "xml2js";
import type BNote from "../../becca/entities/bnote.js";
import noteService from "../../services/notes.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import { sanitizeHtml } from "../sanitizer.js";
const parseString = xml2js.parseString;
interface OpmlXml {
@@ -29,7 +27,7 @@ interface OpmlOutline {
outline: OpmlOutline[];
}
async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: string | Buffer, parentNote: BNote) {
async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: string | Uint8Array, parentNote: BNote) {
const xml = await new Promise<OpmlXml>((resolve, reject) => {
parseString(fileBuffer, (err: any, result: OpmlXml) => {
if (err) {
@@ -65,7 +63,7 @@ async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: s
throw new Error(`Unrecognized OPML version ${opmlVersion}`);
}
content = sanitize.sanitizeHtml(content || "");
content = sanitizeHtml(content || "");
const { note } = noteService.createNewNote({
parentNoteId,

View File

@@ -1,26 +1,25 @@
import { beforeAll, describe, expect, it, vi } from "vitest";
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";
import becca from "../../becca/becca.js";
import BNote from "../../becca/entities/bnote.js";
import TaskContext from "../task_context.js";
import cls from "../cls.js";
import sql_init from "../sql_init.js";
import single from "./single.js";
import stripBom from "strip-bom";
import { getContext } from "../context.js";
const scriptDir = dirname(fileURLToPath(import.meta.url));
async function testImport(fileName: string, mimetype: string) {
const buffer = fs.readFileSync(path.join(scriptDir, "samples", fileName));
const buffer = fs.readFileSync(`${scriptDir}/samples/${fileName}`);
const taskContext = TaskContext.getInstance("import-mdx", "importNotes", {
textImportedAsText: true,
codeImportedAsCode: true
});
return new Promise<{ buffer: Buffer; importedNote: BNote }>((resolve, reject) => {
cls.init(async () => {
getContext().init(async () => {
const rootNote = becca.getNote("root");
if (!rootNote) {
reject("Missing root note.");
@@ -36,6 +35,10 @@ async function testImport(fileName: string, mimetype: string) {
},
rootNote as BNote
);
if (importedNote === null) {
reject("Import failed.");
return;
}
resolve({
buffer,
importedNote

View File

@@ -1,16 +1,17 @@
import type { NoteType } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import type BNote from "../../becca/entities/bnote.js";
import imageService from "../../services/image.js";
import noteService from "../../services/notes.js";
import { processStringOrBuffer } from "../../services/utils.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import type { File } from "./common.js";
import markdownService from "./markdown.js";
import mimeService from "./mime.js";
import importUtils from "./utils.js";
import { getNoteTitle } from "../utils/index.js";
import { sanitizeHtml } from "../sanitizer.js";
import { processStringOrBuffer } from "../utils/binary.js";
function importSingleFile(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const mime = mimeService.getMime(file.originalname) || file.mimetype;
@@ -57,7 +58,7 @@ function importFile(taskContext: TaskContext<"importNotes">, file: File, parentN
const mime = mimeService.getMime(originalName) || file.mimetype;
const { note } = noteService.createNewNote({
parentNoteId: parentNote.noteId,
title: utils.getNoteTitle(originalName, mime === "application/pdf", { mime }),
title: getNoteTitle(originalName, mime === "application/pdf", { mime }),
content: file.buffer,
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(),
type: "file",
@@ -72,7 +73,7 @@ function importFile(taskContext: TaskContext<"importNotes">, file: File, parentN
}
function importCodeNote(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const content = processStringOrBuffer(file.buffer);
const detectedMime = mimeService.getMime(file.originalname) || file.mimetype;
const mime = mimeService.normalizeMimeType(detectedMime);
@@ -97,7 +98,7 @@ function importCodeNote(taskContext: TaskContext<"importNotes">, file: File, par
}
function importCustomType(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote, type: NoteType, mime: string) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const content = processStringOrBuffer(file.buffer);
const { note } = noteService.createNewNote({
@@ -115,7 +116,7 @@ function importCustomType(taskContext: TaskContext<"importNotes">, file: File, p
}
function importPlainText(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const plainTextContent = processStringOrBuffer(file.buffer);
const htmlContent = convertTextToHtml(plainTextContent);
@@ -150,13 +151,13 @@ function convertTextToHtml(text: string) {
}
function importMarkdown(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const markdownContent = processStringOrBuffer(file.buffer);
let htmlContent = markdownService.renderToHtml(markdownContent, title);
if (taskContext.data?.safeImport) {
htmlContent = sanitize.sanitizeHtml(htmlContent);
htmlContent = sanitizeHtml(htmlContent);
}
const { note } = noteService.createNewNote({
@@ -179,12 +180,12 @@ function importHtml(taskContext: TaskContext<"importNotes">, file: File, parentN
// Try to get title from HTML first, fall back to filename
// We do this before sanitization since that turns all <h1>s into <h2>
const htmlTitle = importUtils.extractHtmlTitle(content);
const title = htmlTitle || utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = htmlTitle || getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
content = importUtils.handleH1(content, title);
if (taskContext?.data?.safeImport) {
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
}
const { note } = noteService.createNewNote({

View File

@@ -1,6 +1,4 @@
"use strict";
import { unescapeHtml } from "../utils.js";
import { unescapeHtml } from "../utils";
function handleH1(content: string, title: string) {
let isFirstH1Handled = false;

View File

@@ -1,25 +1,24 @@
import { beforeAll, describe, expect, it, vi } from "vitest";
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";
import zip, { removeTriliumTags } from "./zip.js";
import becca from "../../becca/becca.js";
import BNote from "../../becca/entities/bnote.js";
import TaskContext from "../task_context.js";
import cls from "../cls.js";
import sql_init from "../sql_init.js";
import { trimIndentation } from "@triliumnext/commons";
import { getContext } from "../context.js";
const scriptDir = dirname(fileURLToPath(import.meta.url));
async function testImport(fileName: string) {
const mdxSample = fs.readFileSync(path.join(scriptDir, "samples", fileName));
const mdxSample = fs.readFileSync(`${scriptDir}/samples/${fileName}`);
const taskContext = TaskContext.getInstance("import-mdx", "importNotes", {
textImportedAsText: true
});
return new Promise<{ importedNote: BNote; rootNote: BNote }>((resolve, reject) => {
cls.init(async () => {
getContext().init(async () => {
const rootNote = becca.getNote("root");
if (!rootNote) {
expect(rootNote).toBeTruthy();

View File

@@ -1,10 +1,6 @@
import { ALLOWED_NOTE_TYPES, type NoteType } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import path from "path";
import type { Stream } from "stream";
import yauzl from "yauzl";
import { basename, dirname } from "../utils/path.js";
import { getZipProvider } from "../zip_provider.js";
import becca from "../../becca/becca.js";
import BAttachment from "../../becca/entities/battachment.js";
@@ -12,16 +8,17 @@ import BAttribute from "../../becca/entities/battribute.js";
import BBranch from "../../becca/entities/bbranch.js";
import type BNote from "../../becca/entities/bnote.js";
import attributeService from "../../services/attributes.js";
import log from "../../services/log.js";
import { getLog } from "../../services/log.js";
import noteService from "../../services/notes.js";
import { newEntityId, processStringOrBuffer, unescapeHtml } from "../../services/utils.js";
import type AttributeMeta from "../meta/attribute_meta.js";
import type NoteMeta from "../meta/note_meta.js";
import { getNoteTitle, newEntityId, removeFileExtension, unescapeHtml } from "../../services/utils/index.js";
import { processStringOrBuffer } from "../../services/utils/binary.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import treeService from "../tree.js";
import markdownService from "./markdown.js";
import mimeService from "./mime.js";
import { AttributeMeta, NoteMeta } from "../../meta.js";
import { sanitizeHtml } from "../sanitizer.js";
interface MetaFile {
files: NoteMeta[];
@@ -31,7 +28,7 @@ interface ImportZipOpts {
preserveIds?: boolean;
}
async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Buffer, importRootNote: BNote, opts?: ImportZipOpts): Promise<BNote> {
async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Uint8Array, importRootNote: BNote, opts?: ImportZipOpts): Promise<BNote> {
/** maps from original noteId (in ZIP file) to newly generated noteId */
const noteIdMap: Record<string, string> = {};
/** type maps from original attachmentId (in ZIP file) to newly generated attachmentId */
@@ -140,7 +137,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
if (parentNoteMeta?.noteId) {
parentNoteId = parentNoteMeta.isImportRoot ? importRootNote.noteId : getNewNoteId(parentNoteMeta.noteId);
} else {
const parentPath = path.dirname(filePath);
const parentPath = dirname(filePath);
if (parentPath === ".") {
parentNoteId = importRootNote.noteId;
@@ -162,7 +159,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
// in case we lack metadata, we treat e.g. "Programming.html" and "Programming" as the same note
// (one data file, the other directory for children)
const filePathNoExt = utils.removeFileExtension(filePath);
const filePathNoExt = removeFileExtension(filePath);
if (filePathNoExt in createdPaths) {
return createdPaths[filePathNoExt];
@@ -199,7 +196,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
}
if (!attributeService.isAttributeType(attr.type)) {
log.error(`Unrecognized attribute type ${attr.type}`);
getLog().error(`Unrecognized attribute type ${attr.type}`);
continue;
}
@@ -217,8 +214,8 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
}
if (taskContext.data?.safeImport) {
attr.name = sanitize.sanitizeHtml(attr.name);
attr.value = sanitize.sanitizeHtml(attr.value);
attr.name = sanitizeHtml(attr.name);
attr.value = sanitizeHtml(attr.value);
}
attributes.push(attr);
@@ -234,7 +231,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return;
}
const noteTitle = utils.getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta);
const noteTitle = getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta);
const parentNoteId = getParentNoteId(filePath, parentNoteMeta);
if (!parentNoteId) {
@@ -269,10 +266,10 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
url = url.substr(2);
}
absUrl = path.dirname(filePath);
absUrl = dirname(filePath);
while (url.startsWith("../")) {
absUrl = path.dirname(absUrl);
absUrl = dirname(absUrl);
url = url.substr(3);
}
@@ -318,7 +315,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
});
if (taskContext.data?.safeImport) {
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
}
content = content.replace(/<html.*<body[^>]*>/gis, "");
@@ -333,7 +330,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
try {
url = decodeURIComponent(url).trim();
} catch (e: any) {
log.error(`Cannot parse image URL '${url}', keeping original. Error: ${e.message}.`);
getLog().error(`Cannot parse image URL '${url}', keeping original. Error: ${e.message}.`);
return `src="${url}"`;
}
@@ -344,9 +341,9 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
const target = getEntityIdFromRelativeUrl(url, filePath);
if (target.attachmentId) {
return `src="api/attachments/${target.attachmentId}/image/${path.basename(url)}"`;
return `src="api/attachments/${target.attachmentId}/image/${basename(url)}"`;
} else if (target.noteId) {
return `src="api/images/${target.noteId}/${path.basename(url)}"`;
return `src="api/images/${target.noteId}/${basename(url)}"`;
}
return match;
@@ -356,7 +353,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
try {
url = decodeURIComponent(url).trim();
} catch (e: any) {
log.error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`);
getLog().error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`);
return `href="${url}"`;
}
@@ -392,7 +389,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return content;
}
function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Buffer, noteTitle: string, filePath: string) {
function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Uint8Array, noteTitle: string, filePath: string) {
if ((noteMeta?.format === "markdown" || (!noteMeta && taskContext.data?.textImportedAsText && ["text/markdown", "text/x-markdown", "text/mdx"].includes(mime))) && typeof content === "string") {
content = markdownService.renderToHtml(content, noteTitle);
}
@@ -414,7 +411,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return content;
}
function saveNote(filePath: string, content: string | Buffer) {
function saveNote(filePath: string, content: string | Uint8Array) {
const { parentNoteMeta, noteMeta, attachmentMeta } = getMeta(filePath);
if (noteMeta?.noImport) {
@@ -467,7 +464,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
content = processStringOrBuffer(content);
}
const noteTitle = utils.getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta);
const noteTitle = getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta);
content = processNoteContent(noteMeta, type, mime, content, noteTitle || "", filePath);
@@ -551,46 +548,42 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
noteId,
type: "label",
name: "originalFileName",
value: path.basename(filePath)
value: basename(filePath)
});
}
}
// we're running two passes in order to obtain critical information first (meta file and root)
const topLevelItems = new Set<string>();
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
const zipProvider = getZipProvider();
await zipProvider.readZipFile(fileBuffer, async (entry, readContent) => {
const filePath = normalizeFilePath(entry.fileName);
// make sure that the meta file is loaded before the rest of the files is processed.
if (filePath === "!!!meta.json") {
const content = await readContent(zipfile, entry);
metaFile = JSON.parse(content.toString("utf-8"));
const content = await readContent();
metaFile = JSON.parse(new TextDecoder("utf-8").decode(content));
}
// determine the root of the .zip (i.e. if it has only one top-level folder then the root is that folder, or the root of the archive if there are multiple top-level folders).
const firstSlash = filePath.indexOf("/");
const topLevelPath = (firstSlash !== -1 ? filePath.substring(0, firstSlash) : filePath);
topLevelItems.add(topLevelPath);
zipfile.readEntry();
});
topLevelPath = (topLevelItems.size > 1 ? "" : topLevelItems.values().next().value ?? "");
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
await zipProvider.readZipFile(fileBuffer, async (entry, readContent) => {
const filePath = normalizeFilePath(entry.fileName);
if (/\/$/.test(entry.fileName)) {
saveDirectory(filePath);
} else if (filePath !== "!!!meta.json") {
const content = await readContent(zipfile, entry);
saveNote(filePath, content);
saveNote(filePath, await readContent());
}
taskContext.increaseProgressCount();
zipfile.readEntry();
});
for (const noteId of createdNoteIds) {
@@ -613,7 +606,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
if (attr.type !== "relation" || attr.value in becca.notes) {
new BAttribute(attr).save();
} else {
log.info(`Relation not imported since the target note doesn't exist: ${JSON.stringify(attr)}`);
getLog().info(`Relation not imported since the target note doesn't exist: ${JSON.stringify(attr)}`);
}
}
@@ -639,43 +632,6 @@ function normalizeFilePath(filePath: string): string {
return filePath;
}
/**
 * Collects all chunks of a readable stream into a single Buffer.
 *
 * @param stream the readable stream to drain.
 * @returns a promise resolving to the concatenated stream contents.
 * @throws rejects if the stream emits an "error" event.
 */
function streamToBuffer(stream: Stream): Promise<Buffer> {
    const chunks: Uint8Array[] = [];
    return new Promise((res, rej) => {
        stream.on("data", (chunk) => chunks.push(chunk));
        // Without an error handler the promise would never settle on a failed read,
        // leaving the awaiting import hanging forever.
        stream.on("error", rej);
        stream.on("end", () => res(Buffer.concat(chunks)));
    });
}
/**
 * Reads the raw bytes of a single ZIP entry.
 *
 * @param zipfile the open yauzl archive the entry belongs to.
 * @param entry the entry whose content should be read.
 * @returns a promise resolving to the entry's uncompressed bytes.
 * @throws rejects if the read stream cannot be opened or fails mid-read.
 */
export function readContent(zipfile: yauzl.ZipFile, entry: yauzl.Entry): Promise<Buffer> {
    return new Promise((res, rej) => {
        zipfile.openReadStream(entry, (err, readStream) => {
            // The original fell through after rej(err), then dereferenced the
            // (possibly undefined) stream and threw inside the callback, where
            // nothing could catch it — reject and return instead.
            if (err) {
                rej(err);
                return;
            }
            if (!readStream) {
                rej(new Error("Unable to read content."));
                return;
            }
            // Forward read failures too, not just success.
            streamToBuffer(readStream).then(res, rej);
        });
    });
}
/**
 * Opens a ZIP archive from a buffer and invokes `processEntryCallback` for every entry.
 * Entries are delivered lazily; the callback is expected to call `zipfile.readEntry()`
 * to advance to the next entry (matching the existing call sites in this file).
 *
 * @param buffer the in-memory ZIP archive.
 * @param processEntryCallback async handler invoked once per entry.
 * @returns a promise that resolves when all entries have been visited.
 * @throws rejects on archive parse errors, entry handler errors, or stream errors.
 */
export function readZipFile(buffer: Buffer, processEntryCallback: (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => Promise<void>) {
    return new Promise<void>((res, rej) => {
        yauzl.fromBuffer(buffer, { lazyEntries: true, validateEntrySizes: false }, (err, zipfile) => {
            // The original continued after rej(err) and threw inside the callback
            // (unhandled); reject and return instead.
            if (err) {
                rej(err);
                return;
            }
            if (!zipfile) {
                rej(new Error("Unable to read zip file."));
                return;
            }
            // Surface archive-level failures instead of letting them crash the process.
            zipfile.on("error", rej);
            zipfile.on("entry", async (entry) => {
                try {
                    await processEntryCallback(zipfile, entry);
                } catch (e) {
                    rej(e);
                }
            });
            zipfile.on("end", res);
            // Attach handlers before pulling the first entry so none can be missed.
            zipfile.readEntry();
        });
    });
}
function resolveNoteType(type: string | undefined): NoteType {
// BC for ZIPs created in Trilium 0.57 and older
switch (type) {

View File

@@ -2,7 +2,6 @@ import { type AttachmentRow, type AttributeRow, type BranchRow, dayjs, type Note
import fs from "fs";
import html2plaintext from "html2plaintext";
import { t } from "i18next";
import path from "path";
import url from "url";
import becca from "../becca/becca.js";
@@ -28,6 +27,7 @@ import { getSql } from "./sql/index.js";
import { sanitizeHtml } from "./sanitizer.js";
import { ValidationError } from "../errors.js";
import * as cls from "./context.js";
import { basename } from "./utils/path.js";
interface FoundLink {
name: "imageLink" | "internalLink" | "includeNoteLink" | "relationMapLink";
@@ -552,7 +552,7 @@ async function downloadImage(noteId: string, imageUrl: string) {
}
const parsedUrl = url.parse(unescapedUrl);
const title = path.basename(parsedUrl.pathname || "");
const title = basename(parsedUrl.pathname || "");
const attachment = imageService.saveImageToAttachment(noteId, imageBuffer, title, true, true);

View File

@@ -312,6 +312,26 @@ export class SqlService {
}
}
/**
 * Async-safe transaction wrapper for use in Web Workers and other single-threaded async contexts.
 * Uses manual BEGIN/COMMIT/ROLLBACK because the synchronous `transactional()` cannot await promises.
 *
 * NOTE(review): not reentrant — a nested call would issue a second BEGIN on the same
 * connection; confirm callers never nest. While `func` is awaiting, queries issued by
 * other interleaved async tasks on this connection also run inside this transaction.
 *
 * @param func the async work to execute inside the transaction.
 * @returns the value `func` resolves with, after a successful COMMIT.
 * @throws rethrows `func`'s rejection after issuing ROLLBACK.
 */
async transactionalAsync<T>(func: () => Promise<T>): Promise<T> {
    // IMMEDIATE acquires the write lock up front rather than on the first write statement.
    this.execute("BEGIN IMMEDIATE");
    try {
        const result = await func();
        this.execute("COMMIT");
        // Fire the commit hook only once the connection has fully left the transaction
        // (presumably to skip it when an enclosing transaction is still open — confirm
        // against the synchronous transactional() implementation).
        if (!this.dbConnection.inTransaction) {
            this.params.onTransactionCommit();
        }
        return result;
    } catch (e) {
        this.execute("ROLLBACK");
        this.params.onTransactionRollback();
        throw e;
    }
}
fillParamList(paramIds: string[] | Set<string>, truncate = true) {
if ("length" in paramIds && paramIds.length === 0) {
return;

View File

@@ -15,11 +15,16 @@ import migrationService from "./migration";
export const dbReady = deferred<void>();
let schema: string;
let getDemoArchive: (() => Promise<Uint8Array | null>) | null = null;
/** Stores the SQL schema text used when creating a fresh database. Must be called before first init. */
export function initSchema(schemaStr: string) {
    schema = schemaStr;
}

/** Registers a loader for the demo-content ZIP archive imported into a newly created database (may yield null to skip). */
export function initDemoArchive(fn: () => Promise<Uint8Array | null>) {
    getDemoArchive = fn;
}
function schemaExists() {
return !!getSql().getValue(/*sql*/`SELECT name FROM sqlite_master
WHERE type = 'table' AND name = 'options'`);
@@ -177,21 +182,23 @@ async function createInitialDatabase(skipDemoDb?: boolean) {
});
// Import demo content.
log.info("Importing demo content...");
if (!skipDemoDb && getDemoArchive) {
log.info("Importing demo content...");
const demoFile = await getDemoArchive();
if (demoFile) {
const { default: zipImportService } = await import("./import/zip.js");
const dummyTaskContext = new TaskContext("no-progress-reporting", "importNotes", null);
await zipImportService.importZip(dummyTaskContext, demoFile, rootNote);
}
}
const dummyTaskContext = new TaskContext("no-progress-reporting", "importNotes", null);
// if (demoFile) {
// await zipImportService.importZip(dummyTaskContext, demoFile, rootNote);
// }
// Post-demo.
// Post-demo: pick the first visible (non-system) child of root as the start note.
// System notes have IDs starting with "_" and should not be navigated to on startup.
// Falls back to "root" if no visible child exists (e.g. empty database).
sql.transactional(() => {
// this needs to happen after ZIP import,
// the previous solution was to move option initialization here, but then the important parts of initialization
// are not all in one transaction (because ZIP import is async and thus not transactional)
const startNoteId = sql.getValue("SELECT noteId FROM branches WHERE parentNoteId = 'root' AND isDeleted = 0 ORDER BY notePosition");
const startNoteId = sql.getValue<string | null>(
"SELECT noteId FROM branches WHERE parentNoteId = 'root' AND isDeleted = 0 AND substr(noteId, 1, 1) != '_' ORDER BY notePosition"
) ?? "root";
optionService.setOption(
"openNoteContexts",

View File

@@ -1,3 +1,6 @@
import chardet from "chardet";
import stripBom from "strip-bom";
const utf8Decoder = new TextDecoder("utf-8");
const utf8Encoder = new TextEncoder();
@@ -59,3 +62,32 @@ export function wrapStringOrBuffer(stringOrBuffer: string | Uint8Array) {
return stringOrBuffer;
}
}
/**
 * For buffers, they are scanned for a supported encoding and decoded (UTF-8, UTF-16 LE/BE).
 * In some cases, the BOM is also stripped.
 *
 * For strings, they are returned immediately without any transformation.
 *
 * For nullish values, an empty string is returned.
 *
 * @param data the string or buffer to process.
 * @returns the string representation of the buffer, or the same string if it's a string.
 */
export function processStringOrBuffer(data: string | Uint8Array | null) {
    if (!data) {
        return "";
    }
    if (typeof data === "string") {
        return data;
    }

    // chardet inspects the raw bytes (BOM, byte patterns) to guess the encoding.
    const detectedEncoding = chardet.detect(data);
    switch (detectedEncoding) {
        case "UTF-16LE":
            return stripBom(new TextDecoder("utf-16le").decode(data));
        case "UTF-16BE":
            // Previously fell through to the UTF-8 decoder, producing mojibake for
            // big-endian input even though the doc advertised UTF-16 support.
            // The "utf-16be" label requires ICU, which standard Node and browser builds ship.
            return stripBom(new TextDecoder("utf-16be").decode(data));
        case "UTF-8":
        default:
            // TextDecoder("utf-8") removes a leading BOM by default (ignoreBOM: false),
            // so no explicit strip is needed here.
            return utf8Decoder.decode(data);
    }
}

View File

@@ -8,6 +8,7 @@ import unescape from "unescape";
import { basename, extname } from "./path";
import { NoteMeta } from "../../meta";
export function isDev() { return getPlatform().getEnv("TRILIUM_ENV") === "dev"; }
export function isElectron() { return getPlatform().isElectron; }
export function isMac() { return getPlatform().isMac; }
export function isWindows() { return getPlatform().isWindows; }
@@ -20,6 +21,11 @@ export function hash(text: string) {
return encodeBase64(getCrypto().createHash("sha1", text.normalize()));
}
/** Computes the MD5 digest of the given content and returns it as a lowercase hex string. */
export function md5(content: string | Uint8Array) {
    const digest = getCrypto().createHash("md5", content);
    let hex = "";
    for (const byte of digest) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
export function isStringNote(type: string | undefined, mime: string) {
return (type && STRING_NOTE_TYPES.has(type)) || mime.startsWith("text/") || STRING_MIME_TYPES.has(mime);
}

View File

@@ -16,3 +16,12 @@ export function basename(filePath: string): string {
const lastSlash = Math.max(filePath.lastIndexOf("/"), filePath.lastIndexOf("\\"));
return filePath.substring(lastSlash + 1);
}
/** Returns the directory portion of a file path, or "." when the path has no directory component. */
export function dirname(filePath: string): string {
    // Unify Windows separators so a single lastIndexOf covers both styles.
    const unified = filePath.replace(/\\/g, "/");
    const slashAt = unified.lastIndexOf("/");
    switch (slashAt) {
        case -1:
            // No separator at all: the path is a bare file name.
            return ".";
        case 0:
            // Only the root separator: the parent is the root itself.
            return "/";
        default:
            return unified.slice(0, slashAt);
    }
}

View File

@@ -0,0 +1,48 @@
/** A single entry (file or directory) inside a ZIP archive. */
export interface ZipEntry {
    // Path within the archive; directories end with a trailing "/" (see callers' /\/$/ test).
    fileName: string;
}

/** Metadata for an entry appended to an archive being created. */
export interface ZipArchiveEntryOptions {
    // Path the entry will have inside the archive.
    name: string;
    // Optional modification timestamp recorded for the entry.
    date?: Date;
}

/** A ZIP archive under construction. */
export interface ZipArchive {
    /** Appends one entry with the given content and metadata. */
    append(content: string | Uint8Array, options: ZipArchiveEntryOptions): void;
    /** Connects the archive's output to a writable destination (see {@link FileStream.destination}). */
    pipe(destination: unknown): void;
    /** Finishes the archive; resolves once all entries have been flushed. */
    finalize(): Promise<void>;
}

export interface FileStream {
    /** An opaque writable destination that can be passed to {@link ZipArchive.pipe}. */
    destination: unknown;
    /** Resolves when the stream has finished writing (or rejects on error). */
    waitForFinish(): Promise<void>;
}

/**
 * Platform abstraction over ZIP reading/writing, so core code has no direct
 * dependency on Node-only libraries. A concrete implementation is registered
 * at startup via {@link initZipProvider}.
 */
export interface ZipProvider {
    /**
     * Iterates over every entry in a ZIP buffer, calling `processEntry` for each one.
     * `readContent()` inside the callback reads the raw bytes of that entry on demand.
     */
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void>;

    /** Creates an empty archive to which entries can be appended. */
    createZipArchive(): ZipArchive;

    /** Creates a writable file stream for the given path. */
    createFileStream(filePath: string): FileStream;
}

// Module-level singleton; set once during platform bootstrap.
let zipProvider: ZipProvider | null = null;

/** Registers the platform's ZIP implementation. Call before any import/export runs. */
export function initZipProvider(provider: ZipProvider) {
    zipProvider = provider;
}

/** Returns the registered provider. @throws if {@link initZipProvider} was never called. */
export function getZipProvider(): ZipProvider {
    if (!zipProvider) throw new Error("ZipProvider not initialized.");
    return zipProvider;
}

View File

@@ -3,6 +3,7 @@
"compilerOptions": {
"module": "ESNext",
"moduleResolution": "bundler",
"target": "ES2020",
"rootDir": "src",
"outDir": "dist",
"tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo",

View File

@@ -2,6 +2,9 @@
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./out-tsc/vitest",
"module": "ESNext",
"moduleResolution": "bundler",
"target": "ES2020",
"types": [
"vitest"
],

402
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -26,14 +26,24 @@ const filtered = lines.filter(
let errorIndex = 0;
const numbered: string[] = [];
const seen = new Set<string>();
let skipContinuation = false;
for (const line of filtered) {
if (ERROR_LINE_PATTERN.test(line)) {
if (seen.has(line)) {
skipContinuation = true;
continue;
}
seen.add(line);
skipContinuation = false;
errorIndex++;
numbered.push(`[${errorIndex}] ${line}`);
} else if (line.trim()) {
// Continuation line (indented context for multi-line errors)
numbered.push(line);
if (!skipContinuation) {
numbered.push(line);
}
}
}