Standalone import (#9204)

This commit is contained in:
Elian Doran
2026-03-27 21:52:02 +02:00
committed by GitHub
66 changed files with 513 additions and 241 deletions

View File

@@ -40,12 +40,14 @@
"color": "5.0.3",
"debounce": "3.0.0",
"draggabilly": "3.0.0",
"fflate": "0.8.2",
"force-graph": "1.51.2",
"globals": "17.4.0",
"i18next": "25.10.10",
"i18next-http-backend": "3.0.2",
"jquery": "4.0.0",
"jquery.fancytree": "2.38.5",
"js-md5": "0.8.3",
"js-sha1": "0.7.0",
"js-sha256": "0.11.1",
"js-sha512": "0.9.0",

View File

@@ -5,6 +5,12 @@
import { getContext, routes } from "@triliumnext/core";
export interface UploadedFile {
originalname: string;
mimetype: string;
buffer: Uint8Array;
}
export interface BrowserRequest {
method: string;
url: string;
@@ -13,6 +19,7 @@ export interface BrowserRequest {
query: Record<string, string | undefined>;
headers?: Record<string, string>;
body?: unknown;
file?: UploadedFile;
}
export interface BrowserResponse {
@@ -154,8 +161,9 @@ export class BrowserRouter {
const query = parseQuery(url.search);
const upperMethod = method.toUpperCase();
// Parse JSON body if it's an ArrayBuffer and content-type suggests JSON
// Parse body based on content-type
let parsedBody = body;
let uploadedFile: UploadedFile | undefined;
if (body instanceof ArrayBuffer && headers) {
const contentType = headers['content-type'] || headers['Content-Type'] || '';
if (contentType.includes('application/json')) {
@@ -166,9 +174,31 @@ export class BrowserRouter {
}
} catch (e) {
console.warn('[Router] Failed to parse JSON body:', e);
// Keep original body if JSON parsing fails
parsedBody = body;
}
} else if (contentType.includes('multipart/form-data')) {
try {
// Reconstruct a Response so we can use the native FormData parser
const response = new Response(body, { headers: { 'content-type': contentType } });
const formData = await response.formData();
const formFields: Record<string, string> = {};
for (const [key, value] of formData.entries()) {
if (typeof value === 'string') {
formFields[key] = value;
} else {
// File field (Blob) — multer uses the field name "upload"
const fileBuffer = new Uint8Array(await value.arrayBuffer());
uploadedFile = {
originalname: value.name,
mimetype: value.type || 'application/octet-stream',
buffer: fileBuffer
};
}
}
parsedBody = formFields;
} catch (e) {
console.warn('[Router] Failed to parse multipart body:', e);
}
}
}
// Find matching route
@@ -191,7 +221,8 @@ export class BrowserRouter {
params,
query,
headers: headers ?? {},
body: parsedBody
body: parsedBody,
file: uploadedFile
};
try {

View File

@@ -35,6 +35,7 @@ function toExpressLikeReq(req: BrowserRequest) {
body: req.body,
headers: req.headers ?? {},
method: req.method,
file: req.file,
get originalUrl() { return req.url; }
};
}
@@ -121,6 +122,45 @@ function createRoute(router: BrowserRouter) {
};
}
/**
* Async variant of createRoute for handlers that return Promises (e.g. import).
* Uses transactionalAsync (manual BEGIN/COMMIT/ROLLBACK) instead of the synchronous
* transactional() wrapper, which would commit an empty transaction immediately when
* passed an async callback.
*/
function createAsyncRoute(router: BrowserRouter) {
return (method: HttpMethod, path: string, _middleware: any[], handler: (req: any, res: any) => Promise<unknown>, resultHandler?: ((req: any, res: any, result: unknown) => unknown) | null) => {
router.register(method, path, (req: BrowserRequest) => {
return getContext().init(async () => {
setContextFromHeaders(req);
const expressLikeReq = toExpressLikeReq(req);
const mockRes = createMockExpressResponse();
const result = await getSql().transactionalAsync(() => handler(expressLikeReq, mockRes));
// If the handler used the mock response (e.g. image routes that call res.send()),
// return it as a raw response so BrowserRouter doesn't JSON-serialize it.
if (mockRes._used) {
return {
[RAW_RESPONSE]: true as const,
status: mockRes._status,
headers: mockRes._headers,
body: mockRes._body
};
}
if (resultHandler) {
// Create a minimal response object that captures what apiResultHandler sets.
const res = createResultHandlerResponse();
resultHandler(expressLikeReq, res, result);
return res.result;
}
return result;
});
});
};
}
/**
* Creates a mock Express response object that captures calls to set(), send(), sendStatus(), etc.
* Used for route handlers (like image routes) that write directly to the response.
@@ -219,7 +259,7 @@ export function registerRoutes(router: BrowserRouter): void {
const apiRoute = createApiRoute(router, true);
routes.buildSharedApiRoutes({
route: createRoute(router),
asyncRoute: createRoute(router),
asyncRoute: createAsyncRoute(router),
apiRoute,
asyncApiRoute: createApiRoute(router, false),
apiResultHandler,
@@ -227,7 +267,9 @@ export function registerRoutes(router: BrowserRouter): void {
checkApiAuthOrElectron: noopMiddleware,
checkAppNotInitialized,
checkCredentials: noopMiddleware,
loginRateLimiter: noopMiddleware
loginRateLimiter: noopMiddleware,
uploadMiddlewareWithErrorHandling: noopMiddleware,
csrfMiddleware: noopMiddleware
});
apiRoute('get', '/bootstrap', bootstrapRoute);

View File

@@ -2,6 +2,7 @@ import type { CryptoProvider } from "@triliumnext/core";
import { sha1 } from "js-sha1";
import { sha256 } from "js-sha256";
import { sha512 } from "js-sha512";
import { md5 } from "js-md5";
interface Cipher {
update(data: Uint8Array): Uint8Array;
@@ -15,11 +16,18 @@ const CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
*/
export default class BrowserCryptoProvider implements CryptoProvider {
createHash(algorithm: "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
const data = typeof content === "string" ? content :
new TextDecoder().decode(content);
const hexHash = algorithm === "sha1" ? sha1(data) : sha512(data);
let hexHash: string;
if (algorithm === "md5") {
hexHash = md5(data);
} else if (algorithm === "sha1") {
hexHash = sha1(data);
} else {
hexHash = sha512(data);
}
// Convert hex string to Uint8Array
const bytes = new Uint8Array(hexHash.length / 2);

View File

@@ -501,9 +501,12 @@ export default class BrowserSqlProvider implements DatabaseProvider {
// Helper function to execute within a transaction
const executeTransaction = (beginStatement: string, ...args: unknown[]): T => {
// If we're already in a transaction, use SAVEPOINTs for nesting
// This mimics better-sqlite3's behavior
if (self._inTransaction) {
// If we're already in a transaction (either tracked via JS flag or via actual SQLite
// autocommit state), use SAVEPOINTs for nesting — this handles the case where a manual
// BEGIN was issued directly (e.g. transactionalAsync) without going through transaction().
const sqliteInTransaction = self.db?.pointer !== undefined
&& (self.sqlite3!.capi as any).sqlite3_get_autocommit(self.db!.pointer) === 0;
if (self._inTransaction || sqliteInTransaction) {
const savepointName = `sp_${++savepointCounter}_${Date.now()}`;
self.db!.exec(`SAVEPOINT ${savepointName}`);
try {

View File

@@ -0,0 +1,27 @@
import type { ZipEntry, ZipProvider } from "@triliumnext/core/src/services/import/zip_provider.js";
import { unzip } from "fflate";
/**
 * Browser-side ZipProvider backed by fflate's asynchronous unzip().
 * The whole archive is inflated in memory, then each entry is handed to the
 * caller-supplied processEntry callback one at a time, in sequence.
 */
export default class BrowserZipProvider implements ZipProvider {
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void> {
        return new Promise<void>((resolve, reject) => {
            unzip(buffer, async (err, files) => {
                if (err) {
                    reject(err);
                    return;
                }

                try {
                    // fflate has already decompressed every entry, so readContent
                    // just wraps the in-memory bytes in a resolved promise.
                    for (const fileName of Object.keys(files)) {
                        const data = files[fileName];
                        await processEntry({ fileName }, () => Promise.resolve(data));
                    }
                    resolve();
                } catch (e) {
                    reject(e);
                }
            });
        });
    }
}

View File

@@ -55,6 +55,7 @@ let BrowserSqlProvider: typeof import('./lightweight/sql_provider').default;
let WorkerMessagingProvider: typeof import('./lightweight/messaging_provider').default;
let BrowserExecutionContext: typeof import('./lightweight/cls_provider').default;
let BrowserCryptoProvider: typeof import('./lightweight/crypto_provider').default;
let BrowserZipProvider: typeof import('./lightweight/zip_provider').default;
let FetchRequestProvider: typeof import('./lightweight/request_provider').default;
let StandalonePlatformProvider: typeof import('./lightweight/platform_provider').default;
let translationProvider: typeof import('./lightweight/translation_provider').default;
@@ -82,6 +83,7 @@ async function loadModules(): Promise<void> {
messagingModule,
clsModule,
cryptoModule,
zipModule,
requestModule,
platformModule,
translationModule,
@@ -91,6 +93,7 @@ async function loadModules(): Promise<void> {
import('./lightweight/messaging_provider.js'),
import('./lightweight/cls_provider.js'),
import('./lightweight/crypto_provider.js'),
import('./lightweight/zip_provider.js'),
import('./lightweight/request_provider.js'),
import('./lightweight/platform_provider.js'),
import('./lightweight/translation_provider.js'),
@@ -101,6 +104,7 @@ async function loadModules(): Promise<void> {
WorkerMessagingProvider = messagingModule.default;
BrowserExecutionContext = clsModule.default;
BrowserCryptoProvider = cryptoModule.default;
BrowserZipProvider = zipModule.default;
FetchRequestProvider = requestModule.default;
StandalonePlatformProvider = platformModule.default;
translationProvider = translationModule.default;
@@ -152,11 +156,17 @@ async function initialize(): Promise<void> {
await coreModule.initializeCore({
executionContext: new BrowserExecutionContext(),
crypto: new BrowserCryptoProvider(),
zip: new BrowserZipProvider(),
messaging: messagingProvider!,
request: new FetchRequestProvider(),
platform: new StandalonePlatformProvider(queryString),
translations: translationProvider,
schema: schemaModule.default,
getDemoArchive: async () => {
const response = await fetch("/server-assets/db/demo.zip");
if (!response.ok) return null;
return new Uint8Array(await response.arrayBuffer());
},
dbConfig: {
provider: sqlProvider!,
isReadOnly: false,

View File

@@ -1,6 +1,7 @@
import { getLog, initializeCore, sql_init } from "@triliumnext/core";
import ClsHookedExecutionContext from "@triliumnext/server/src/cls_provider.js";
import NodejsCryptoProvider from "@triliumnext/server/src/crypto_provider.js";
import NodejsZipProvider from "@triliumnext/server/src/zip_provider.js";
import dataDirs from "@triliumnext/server/src/services/data_dir.js";
import options from "@triliumnext/server/src/services/options.js";
import port from "@triliumnext/server/src/services/port.js";
@@ -133,12 +134,14 @@ async function main() {
}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new DesktopPlatformProvider(),
translations: (await import("@triliumnext/server/src/services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -62,7 +62,7 @@ export function startElectron(callback: () => void): DeferredPromise<void> {
export async function importData(path: string) {
const buffer = await createImportZip(path);
const importService = (await import("@triliumnext/server/src/services/import/zip.js")).default;
const { zipImportService } = (await import("@triliumnext/core"));
const context = new TaskContext("no-progress-reporting", "importNotes", null);
const becca = (await import("@triliumnext/server/src/becca/becca.js")).default;
@@ -70,7 +70,7 @@ export async function importData(path: string) {
if (!rootNote) {
throw new Error("Missing root note for import.");
}
await importService.importZip(context, buffer, rootNote, {
await zipImportService.importZip(context, buffer, rootNote, {
preserveIds: true
});
}
@@ -106,19 +106,18 @@ function waitForEnd(archive: Archiver, stream: WriteStream) {
export async function extractZip(zipFilePath: string, outputPath: string, ignoredFiles?: Set<string>) {
const promise = deferred<void>();
setTimeout(async () => {
// Then extract the zip.
const { readZipFile, readContent } = (await import("@triliumnext/server/src/services/import/zip.js"));
await readZipFile(await fs.readFile(zipFilePath), async (zip, entry) => {
const { getZipProvider } = (await import("@triliumnext/core"));
const zipProvider = getZipProvider();
const buffer = await fs.readFile(zipFilePath);
await zipProvider.readZipFile(buffer, async (entry, readContent) => {
// We ignore directories since they can appear out of order anyway.
if (!entry.fileName.endsWith("/") && !ignoredFiles?.has(entry.fileName)) {
const destPath = path.join(outputPath, entry.fileName);
const fileContent = await readContent(zip, entry);
const fileContent = await readContent();
await fsExtra.mkdirs(path.dirname(destPath));
await fs.writeFile(destPath, fileContent);
}
zip.readEntry();
});
promise.resolve();
}, 1000);

View File

@@ -68,7 +68,6 @@
"axios": "1.13.6",
"bindings": "1.5.0",
"bootstrap": "5.3.8",
"chardet": "2.1.1",
"cheerio": "1.2.0",
"chokidar": "5.0.0",
"cls-hooked": "4.2.2",
@@ -108,8 +107,7 @@
"safe-compare": "1.1.4",
"sax": "1.6.0",
"serve-favicon": "2.5.1",
"stream-throttle": "0.1.3",
"strip-bom": "5.0.0",
"stream-throttle": "0.1.3",
"striptags": "3.2.0",
"supertest": "7.2.2",
"swagger-jsdoc": "6.2.8",

View File

@@ -4,6 +4,7 @@ import { join } from "path";
import { initializeCore } from "@triliumnext/core";
import ClsHookedExecutionContext from "../src/cls_provider.js";
import NodejsCryptoProvider from "../src/crypto_provider.js";
import NodejsZipProvider from "../src/zip_provider.js";
import ServerPlatformProvider from "../src/platform_provider.js";
import BetterSqlite3Provider from "../src/sql_provider.js";
import { initializeTranslations } from "../src/services/i18n.js";
@@ -27,6 +28,7 @@ beforeAll(async () => {
onTransactionRollback() {}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
executionContext: new ClsHookedExecutionContext(),
schema: readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new ServerPlatformProvider(),

View File

@@ -6,7 +6,7 @@ const randtoken = generator({ source: "crypto" });
export default class NodejsCryptoProvider implements CryptoProvider {
createHash(algorithm: "sha1", content: string | Uint8Array): Uint8Array {
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array {
return crypto.createHash(algorithm).update(content).digest();
}

View File

@@ -1,11 +1,10 @@
import { NoteParams, SearchParams } from "@triliumnext/core";
import { NoteParams, SearchParams, zipImportService } from "@triliumnext/core";
import type { Request, Router } from "express";
import type { ParsedQs } from "qs";
import becca from "../becca/becca.js";
import zipExportService from "../services/export/zip.js";
import type { ExportFormat } from "../services/export/zip/abstract_provider.js";
import zipImportService from "../services/import/zip.js";
import noteService from "../services/notes.js";
import SearchContext from "../services/search/search_context.js";
import searchService from "../services/search/services/search.js";

View File

@@ -10,6 +10,7 @@ import path from "path";
import ClsHookedExecutionContext from "./cls_provider.js";
import NodejsCryptoProvider from "./crypto_provider.js";
import NodejsZipProvider from "./zip_provider.js";
import ServerPlatformProvider from "./platform_provider.js";
import dataDirs from "./services/data_dir.js";
import port from "./services/port.js";
@@ -51,12 +52,14 @@ async function startApplication() {
}
},
crypto: new NodejsCryptoProvider(),
zip: new NodejsZipProvider(),
request: new NodeRequestProvider(),
executionContext: new ClsHookedExecutionContext(),
messaging: new WebSocketMessagingProvider(),
schema: fs.readFileSync(require.resolve("@triliumnext/core/src/assets/schema.sql"), "utf-8"),
platform: new ServerPlatformProvider(),
translations: (await import("./services/i18n.js")).initializeTranslations,
getDemoArchive: async () => fs.readFileSync(require.resolve("@triliumnext/server/src/assets/db/demo.zip")),
extraAppInfo: {
nodeVersion: process.version,
dataDirectory: path.resolve(dataDirs.TRILIUM_DATA_DIR)

View File

@@ -1,8 +1,8 @@
import { RenderMarkdownResponse, ToMarkdownResponse } from "@triliumnext/commons";
import { markdownImportService } from "@triliumnext/core";
import type { Request } from "express";
import markdownService from "../../services/import/markdown.js";
import markdown from "../../services/export/markdown.js";
import { RenderMarkdownResponse, ToMarkdownResponse } from "@triliumnext/commons";
function renderMarkdown(req: Request) {
const { markdownContent } = req.body;
@@ -10,7 +10,7 @@ function renderMarkdown(req: Request) {
throw new Error('markdownContent parameter is required and must be a string');
}
return {
htmlContent: markdownService.renderToHtml(markdownContent, "")
htmlContent: markdownImportService.renderToHtml(markdownContent, "")
} satisfies RenderMarkdownResponse;
}

View File

@@ -25,7 +25,6 @@ import etapiTokensApiRoutes from "./api/etapi_tokens.js";
import exportRoute from "./api/export.js";
import filesRoute from "./api/files.js";
import fontsRoute from "./api/fonts.js";
import importRoute from "./api/import.js";
import loginApiRoute from "./api/login.js";
import metricsRoute from "./api/metrics.js";
import otherRoute from "./api/other.js";
@@ -89,7 +88,9 @@ function register(app: express.Application) {
checkApiAuthOrElectron: auth.checkApiAuthOrElectron,
checkAppNotInitialized: auth.checkAppNotInitialized,
checkCredentials: auth.checkCredentials,
loginRateLimiter
loginRateLimiter,
uploadMiddlewareWithErrorHandling,
csrfMiddleware
});
route(PUT, "/api/notes/:noteId/file", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], filesRoute.updateFile, apiResultHandler);
@@ -130,8 +131,6 @@ function register(app: express.Application) {
// route(GET, "/api/revisions/:revisionId/download", [auth.checkApiAuthOrElectron], revisionsApiRoute.downloadRevision);
route(GET, "/api/branches/:branchId/export/:type/:format/:version/:taskId", [auth.checkApiAuthOrElectron], exportRoute.exportBranch);
asyncRoute(PST, "/api/notes/:parentNoteId/notes-import", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importNotesToBranch, apiResultHandler);
route(PST, "/api/notes/:parentNoteId/attachments-import", [auth.checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importAttachmentsToNote, apiResultHandler);
// :filename is not used by trilium, but instead used for "save as" to assign a human-readable filename

View File

@@ -46,10 +46,6 @@ function putEntityChange(entityChange: EntityChange) {
cls.putEntityChange(entityChange);
}
function ignoreEntityChangeIds() {
cls.getContext().set("ignoreEntityChangeIds", true);
}
function get(key: string) {
return cls.getContext().get(key);
}
@@ -62,7 +58,10 @@ function reset() {
cls.getContext().reset();
}
/** @deprecated */
export const wrap = cls.wrap;
/** @deprecated */
export const ignoreEntityChangeIds = cls.ignoreEntityChangeIds;
export default {
init,

View File

@@ -1,9 +1,7 @@
import { getCrypto,utils as coreUtils } from "@triliumnext/core";
import chardet from "chardet";
import { binary_utils,getCrypto, utils as coreUtils } from "@triliumnext/core";
import crypto from "crypto";
import { release as osRelease } from "os";
import path from "path";
import stripBom from "strip-bom";
const osVersion = osRelease().split('.').map(Number);
@@ -27,10 +25,6 @@ export function randomString(length: number): string {
return coreUtils.randomString(length);
}
export function md5(content: crypto.BinaryLike) {
return crypto.createHash("md5").update(content).digest("hex");
}
/** @deprecated */
export function hashedBlobId(content: string | Buffer) {
return coreUtils.hashedBlobId(content);
@@ -138,35 +132,6 @@ export function getResourceDir() {
return path.join(__dirname, "..");
}
/**
* For buffers, they are scanned for a supported encoding and decoded (UTF-8, UTF-16). In some cases, the BOM is also stripped.
*
* For strings, they are returned immediately without any transformation.
*
* For nullish values, an empty string is returned.
*
* @param data the string or buffer to process.
* @returns the string representation of the buffer, or the same string is it's a string.
*/
export function processStringOrBuffer(data: string | Buffer | null) {
if (!data) {
return "";
}
if (!Buffer.isBuffer(data)) {
return data;
}
const detectedEncoding = chardet.detect(data);
switch (detectedEncoding) {
case "UTF-16LE":
return stripBom(data.toString("utf-16le"));
case "UTF-8":
default:
return data.toString("utf-8");
}
}
/** @deprecated */
export const escapeHtml = coreUtils.escapeHtml;
/** @deprecated */
@@ -183,6 +148,7 @@ export const isEmptyOrWhitespace = coreUtils.isEmptyOrWhitespace;
export const normalizeUrl = coreUtils.normalizeUrl;
export const timeLimit = coreUtils.timeLimit;
export const sanitizeSqlIdentifier = coreUtils.sanitizeSqlIdentifier;
export const processStringOrBuffer = binary_utils.processStringOrBuffer;
export function waitForStreamToFinish(stream: any): Promise<void> {
return new Promise((resolve, reject) => {
@@ -207,7 +173,6 @@ export default {
isMac,
isStringNote,
isWindows,
md5,
newEntityId,
normalize,
quoteRegex,

View File

@@ -0,0 +1,46 @@
import type { ZipEntry, ZipProvider } from "@triliumnext/core/src/services/import/zip_provider.js";
import type { Stream } from "stream";
import yauzl from "yauzl";
/**
 * Drains a readable stream and resolves with its full contents concatenated
 * into a single Buffer. Rejects if the stream emits an error.
 */
function streamToBuffer(stream: Stream): Promise<Buffer> {
    const collected: Uint8Array[] = [];
    stream.on("data", (chunk: Uint8Array) => collected.push(chunk));
    return new Promise((resolve, reject) => {
        stream.on("error", reject);
        stream.on("end", () => resolve(Buffer.concat(collected)));
    });
}
/**
 * Node.js ZipProvider backed by yauzl.
 *
 * Entries are streamed lazily (lazyEntries: true): the next entry is only
 * requested once the previous one has been fully processed, so at most one
 * entry's content is buffered in memory at a time.
 */
export default class NodejsZipProvider implements ZipProvider {
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void> {
        return new Promise<void>((resolve, reject) => {
            yauzl.fromBuffer(Buffer.from(buffer), { lazyEntries: true, validateEntrySizes: false }, (err, zipfile) => {
                if (err) { reject(err); return; }
                if (!zipfile) { reject(new Error("Unable to read zip file.")); return; }
                zipfile.readEntry();
                zipfile.on("entry", async (entry: yauzl.Entry) => {
                    // Lazily opens the entry's read stream and buffers it fully.
                    const readContent = () => new Promise<Uint8Array>((res, rej) => {
                        zipfile.openReadStream(entry, (streamErr, readStream) => {
                            if (streamErr) { rej(streamErr); return; }
                            if (!readStream) { rej(new Error("Unable to read content.")); return; }
                            streamToBuffer(readStream).then(res, rej);
                        });
                    });
                    try {
                        await processEntry({ fileName: entry.fileName }, readContent);
                    } catch (e) {
                        // Fix: stop iterating once processing fails. Previously
                        // readEntry() was still called after rejecting, so yauzl
                        // kept emitting entries for an already-failed operation.
                        reject(e);
                        zipfile.close();
                        return;
                    }
                    zipfile.readEntry();
                });
                zipfile.on("end", resolve);
                zipfile.on("error", reject);
            });
        });
    }
}

View File

@@ -10,12 +10,14 @@
"@braintree/sanitize-url": "7.1.1",
"@triliumnext/commons": "workspace:*",
"async-mutex": "0.5.0",
"chardet": "2.1.1",
"escape-html": "1.0.3",
"i18next": "25.10.10",
"mime-types": "3.0.2",
"node-html-parser": "7.1.0",
"sanitize-filename": "1.6.4",
"sanitize-html": "2.17.2",
"strip-bom": "5.0.0",
"unescape": "1.0.1"
},
"devDependencies": {

View File

@@ -6,9 +6,11 @@ import { SqlService, SqlServiceParams } from "./services/sql/sql";
import { initMessaging, MessagingProvider } from "./services/messaging/index";
import { initRequest, RequestProvider } from "./services/request";
import { initTranslations, TranslationProvider } from "./services/i18n";
import { initSchema } from "./services/sql_init";
import { initSchema, initDemoArchive } from "./services/sql_init";
import appInfo from "./services/app_info";
import { type PlatformProvider, initPlatform } from "./services/platform";
import { type ZipProvider, initZipProvider } from "./services/import/zip_provider";
import markdown from "./services/import/markdown";
export { getLog } from "./services/log";
export type * from "./services/sql/types";
@@ -99,18 +101,25 @@ export type { RequestProvider, ExecOpts, CookieJar } from "./services/request";
export type * from "./meta";
export * as routeHelpers from "./routes/helpers";
export { getZipProvider, type ZipProvider } from "./services/import/zip_provider";
export { default as zipImportService } from "./services/import/zip";
export * as becca_easy_mocking from "./test/becca_easy_mocking";
export * as becca_mocking from "./test/becca_mocking";
export async function initializeCore({ dbConfig, executionContext, crypto, translations, messaging, request, schema, extraAppInfo, platform }: {
export { default as markdownImportService } from "./services/import/markdown";
export async function initializeCore({ dbConfig, executionContext, crypto, zip, translations, messaging, request, schema, extraAppInfo, platform, getDemoArchive }: {
dbConfig: SqlServiceParams,
executionContext: ExecutionContext,
crypto: CryptoProvider,
zip: ZipProvider,
translations: TranslationProvider,
platform: PlatformProvider,
schema: string,
messaging?: MessagingProvider,
request?: RequestProvider,
getDemoArchive?: () => Promise<Uint8Array | null>,
extraAppInfo?: {
nodeVersion: string;
dataDirectory: string;
@@ -120,9 +129,13 @@ export async function initializeCore({ dbConfig, executionContext, crypto, trans
initLog();
await initTranslations(translations);
initCrypto(crypto);
initZipProvider(zip);
initContext(executionContext);
initSql(new SqlService(dbConfig, getLog()));
initSchema(schema);
if (getDemoArchive) {
initDemoArchive(getDemoArchive);
}
Object.assign(appInfo, extraAppInfo);
if (messaging) {
initMessaging(messaging);

View File

@@ -1,19 +1,23 @@
import { becca_loader, ValidationError } from "@triliumnext/core";
import type { Request } from "express";
import path from "path";
import type { File } from "../../services/import/common.js";
type ImportRequest<P> = Omit<Request<P>, "file"> & { file?: File };
import becca from "../../becca/becca.js";
import type BNote from "../../becca/entities/bnote.js";
import cls from "../../services/cls.js";
import enexImportService from "../../services/import/enex.js";
// import enexImportService from "../../services/import/enex.js";
import opmlImportService from "../../services/import/opml.js";
import singleImportService from "../../services/import/single.js";
import zipImportService from "../../services/import/zip.js";
import log from "../../services/log.js";
import { getLog } from "../../services/log.js";
import TaskContext from "../../services/task_context.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils.js";
import { safeExtractMessageAndStackFromError } from "../../services/utils/index.js";
import * as cls from "../../services/context.js";
import { ValidationError } from "../../errors.js";
import becca_loader from "../../becca/becca_loader.js";
import { extname } from "../../services/utils/path.js";
async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
async function importNotesToBranch(req: ImportRequest<{ parentNoteId: string }>) {
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
@@ -34,7 +38,7 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
const parentNote = becca.getNoteOrThrow(parentNoteId);
const extension = path.extname(file.originalname).toLowerCase();
const extension = extname(file.originalname).toLowerCase();
// running all the event handlers on imported notes (and attributes) is slow
// and may produce unintended consequences
@@ -58,21 +62,22 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
return importResult;
}
} else if (extension === ".enex" && options.explodeArchives) {
const importResult = await enexImportService.importEnex(taskContext, file, parentNote);
if (!Array.isArray(importResult)) {
note = importResult;
} else {
return importResult;
}
throw "ENEX import is currently not supported. Please use the desktop app to import ENEX files and then sync with the server.";
// const importResult = await enexImportService.importEnex(taskContext, file, parentNote);
// if (!Array.isArray(importResult)) {
// note = importResult;
// } else {
// return importResult;
// }
} else {
note = await singleImportService.importSingleFile(taskContext, file, parentNote);
note = singleImportService.importSingleFile(taskContext, file, parentNote);
}
} catch (e: unknown) {
const [errMessage, errStack] = safeExtractMessageAndStackFromError(e);
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
taskContext.reportError(message);
log.error(message + errStack);
getLog().error(message + errStack);
return [500, message];
}
@@ -99,7 +104,7 @@ async function importNotesToBranch(req: Request<{ parentNoteId: string }>) {
return note.getPojo();
}
function importAttachmentsToNote(req: Request<{ parentNoteId: string }>) {
function importAttachmentsToNote(req: ImportRequest<{ parentNoteId: string }>) {
const { parentNoteId } = req.params;
const { taskId, last } = req.body;
@@ -126,7 +131,7 @@ function importAttachmentsToNote(req: Request<{ parentNoteId: string }>) {
const message = `Import failed with following error: '${errMessage}'. More details might be in the logs.`;
taskContext.reportError(message);
log.error(message + errStack);
getLog().error(message + errStack);
return [500, message];
}

View File

@@ -1,6 +1,5 @@
import { EditedNotesResponse, RevisionItem, RevisionPojo } from "@triliumnext/commons";
import type { Request, Response } from "express";
import path from "path";
import becca from "../../becca/becca.js";
import type BNote from "../../becca/entities/bnote.js";
@@ -10,6 +9,7 @@ import eraseService from "../../services/erase.js";
import { NotePojo } from "../../becca/becca-interface.js";
import { becca_service, binary_utils, cls, getSql } from "../../index.js";
import { formatDownloadTitle, getContentDisposition } from "../../services/utils/index.js";
import { extname } from "../../services/utils/path.js";
interface NotePath {
noteId: string;
@@ -67,7 +67,7 @@ function getRevisionFilename(revision: BRevision) {
throw new Error("Missing creation date for revision.");
}
const extension = path.extname(filename);
const extension = extname(filename);
const date = revision.dateCreated
.substr(0, 19)
.replace(" ", "_")

View File

@@ -15,7 +15,7 @@ function getStatus() {
async function setupNewDocument(req: Request) {
const { skipDemoDb } = req.query;
await sqlInit.createInitialDatabase(!!skipDemoDb);
await sqlInit.createInitialDatabase(skipDemoDb !== undefined);
}
function setupSyncFromServer(req: Request): Promise<SetupSyncFromServerResponse> {

View File

@@ -25,6 +25,7 @@ import similarNotesRoute from "./api/similar_notes";
import imageRoute from "./api/image";
import setupApiRoute from "./api/setup";
import filesRoute from "./api/files";
import importRoute from "./api/import";
// TODO: Deduplicate with routes.ts
const GET = "get",
@@ -44,9 +45,11 @@ interface SharedApiRoutesContext {
checkAppNotInitialized: any;
loginRateLimiter: any;
checkCredentials: any;
uploadMiddlewareWithErrorHandling: any;
csrfMiddleware: any;
}
export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRoute, checkApiAuth, apiResultHandler, checkApiAuthOrElectron, checkAppNotInitialized, checkCredentials, loginRateLimiter }: SharedApiRoutesContext) {
export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRoute, checkApiAuth, apiResultHandler, checkApiAuthOrElectron, checkAppNotInitialized, checkCredentials, loginRateLimiter, uploadMiddlewareWithErrorHandling, csrfMiddleware }: SharedApiRoutesContext) {
apiRoute(GET, '/api/tree', treeApiRoute.getTree);
apiRoute(PST, '/api/tree/load', treeApiRoute.load);
@@ -136,6 +139,9 @@ export function buildSharedApiRoutes({ route, asyncRoute, apiRoute, asyncApiRout
route(PST, "/api/sync/queue-sector/:entityName/:sector", [checkApiAuth], syncApiRoute.queueSector, apiResultHandler);
route(GET, "/api/sync/stats", [], syncApiRoute.getStats, apiResultHandler);
asyncRoute(PST, "/api/notes/:parentNoteId/notes-import", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importNotesToBranch, apiResultHandler);
route(PST, "/api/notes/:parentNoteId/attachments-import", [checkApiAuthOrElectron, uploadMiddlewareWithErrorHandling, csrfMiddleware], importRoute.importAttachmentsToNote, apiResultHandler);
apiRoute(GET, "/api/quick-search/:searchString", searchRoute.quickSearch);
apiRoute(GET, "/api/search-note/:noteId", searchRoute.searchFromNote);
apiRoute(PST, "/api/search-and-execute-note/:noteId", searchRoute.searchAndExecute);

View File

@@ -77,3 +77,7 @@ export function getAndClearEntityChangeIds() {
return entityChangeIds;
}
export function ignoreEntityChangeIds() {
getContext().set("ignoreEntityChangeIds", true);
}

View File

@@ -5,7 +5,7 @@ interface Cipher {
export interface CryptoProvider {
createHash(algorithm: "sha1" | "sha512", content: string | Uint8Array): Uint8Array;
createHash(algorithm: "md5" | "sha1" | "sha512", content: string | Uint8Array): Uint8Array;
randomBytes(size: number): Uint8Array;
randomString(length: number): string;
createCipheriv(algorithm: "aes-128-cbc", key: Uint8Array, iv: Uint8Array): Cipher;

View File

@@ -0,0 +1,10 @@
// TODO: Move this to a dedicated file someday.
// Maps lowercase admonition keywords to the uppercase type names; every value
// is simply the key uppercased, so the table is derived rather than spelled out.
export const ADMONITION_TYPE_MAPPINGS: Record<string, string> = Object.fromEntries(
    ["note", "tip", "important", "caution", "warning"].map((kind) => [kind, kind.toUpperCase()])
);

export const DEFAULT_ADMONITION_TYPE = ADMONITION_TYPE_MAPPINGS.note;

View File

@@ -1,5 +1,5 @@
export default {
saveImageToAttachment(noteId: string, imageBuffer: Uint8Array, title: string, b1: boolean, b2: boolean) {
saveImageToAttachment(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
console.warn("Image save ignored", noteId, title);
return {
@@ -10,5 +10,13 @@ export default {
updateImage(noteId: string, imageBuffer: Uint8Array, title: string) {
console.warn("Image update ignored", noteId, title);
},
saveImage(noteId: string, imageBuffer: Uint8Array, title: string, b1?: boolean, b2?: boolean) {
console.warn("Image save ignored", noteId, title);
return {
note: null
};
}
}

View File

@@ -1,5 +1,5 @@
export interface File {
originalname: string;
mimetype: string;
buffer: string | Buffer;
buffer: string | Buffer | Uint8Array;
}

View File

@@ -1,20 +1,22 @@
import type { AttributeType } from "@triliumnext/commons";
import { dayjs } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import sax from "sax";
import stream from "stream";
import { Throttle } from "stream-throttle";
import type BNote from "../../becca/entities/bnote.js";
import date_utils from "../date_utils.js";
import date_utils from "../utils/date.js";
import * as utils from "../utils/index.js";
import imageService from "../image.js";
import log from "../log.js";
import { getLog } from "../log.js";
import noteService from "../notes.js";
import protectedSessionService from "../protected_session.js";
import sql from "../sql.js";
import type TaskContext from "../task_context.js";
import { escapeHtml, fromBase64,md5 } from "../utils.js";
import { escapeHtml, md5 } from "../utils/index.js";
import { decodeBase64 } from "../utils/binary.js";
import type { File } from "./common.js";
import { sanitizeHtml } from "../sanitizer.js";
import { getSql } from "../sql/index.js";
/**
* date format is e.g. 20181121T193703Z or 2013-04-14T16:19:00.000Z (Mac evernote, see #3496)
@@ -38,7 +40,7 @@ interface Attribute {
interface Resource {
title: string;
content?: Buffer | string;
content?: Uint8Array | string;
mime?: string;
attributes: Attribute[];
}
@@ -117,7 +119,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
"\u2611 "
);
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
return content;
}
@@ -138,7 +140,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
saxStream.on("error", (e) => {
// unhandled errors will throw, since this is a proper node event emitter.
log.error(`error when parsing ENEX file: ${e}`);
getLog().error(`error when parsing ENEX file: ${e}`);
// clear the error
(saxStream._parser as any).error = null;
saxStream._parser.resume();
@@ -235,6 +237,8 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
});
const sql = getSql();
function updateDates(note: BNote, utcDateCreated?: string, utcDateModified?: string) {
// it's difficult to force custom dateCreated and dateModified to Note entity, so we do it post-creation with SQL
const dateCreated = formatDateTimeToLocalDbFormat(utcDateCreated, false);
@@ -295,7 +299,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
if (typeof resource.content === "string") {
resource.content = fromBase64(resource.content);
resource.content = decodeBase64(resource.content);
}
const hash = md5(resource.content);
@@ -359,7 +363,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
content += imageLink;
}
} catch (e: any) {
log.error(`error when saving image from ENEX file: ${e.message}`);
getLog().error(`error when saving image from ENEX file: ${e.message}`);
createFileNote();
}
} else {
@@ -367,7 +371,7 @@ function importEnex(taskContext: TaskContext<"importNotes">, file: File, parentN
}
}
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
// save updated content with links to files/images
noteEntity.setContent(content);

View File

@@ -1,17 +1,15 @@
import { getMimeTypeFromMarkdownName, MIME_TYPE_AUTO } from "@triliumnext/commons";
import { normalizeMimeTypeForCKEditor } from "@triliumnext/commons";
import { sanitize } from "@triliumnext/core";
import { parse, Renderer, type Tokens,use } from "marked";
import { ADMONITION_TYPE_MAPPINGS } from "../export/markdown.js";
import utils from "../utils.js";
import wikiLinkInternalLink from "./markdown/wikilink_internal_link.js";
import wikiLinkTransclusion from "./markdown/wikilink_transclusion.js";
import importUtils from "./utils.js";
import { escapeHtml } from "../utils/index.js";
import { sanitizeHtml } from "../sanitizer.js";
const escape = utils.escapeHtml;
const escape = escapeHtml;
/**
* Keep renderer code up to date with https://github.com/markedjs/marked/blob/master/src/Renderer.ts.
@@ -151,7 +149,7 @@ function renderToHtml(content: string, title: string) {
// h1 handling needs to come before sanitization
html = importUtils.handleH1(html, title);
html = sanitize.sanitizeHtml(html);
html = sanitizeHtml(html);
// Add a trailing semicolon to CSS styles.
html = html.replaceAll(/(<(img|figure|col).*?style=".*?)"/g, "$1;\"");

View File

@@ -1,8 +1,8 @@
"use strict";
import mimeTypes from "mime-types";
import path from "path";
import { types as extToMime } from "mime-types";
import type { NoteType, TaskData } from "@triliumnext/commons";
import { extname } from "../utils/path";
const CODE_MIME_TYPES = new Set([
"application/json",
@@ -84,10 +84,10 @@ function getMime(fileName: string) {
return "text/x-dockerfile";
}
const ext = path.extname(fileNameLc);
const ext = extname(fileNameLc);
const mimeFromExt = EXTENSION_TO_MIME.get(ext);
return mimeFromExt || mimeTypes.lookup(fileNameLc);
return mimeFromExt || extToMime[ext.slice(1)] || false;
}
function getType(options: TaskData<"importNotes">, mime: string): NoteType {

View File

@@ -1,12 +1,10 @@
import { sanitize } from "@triliumnext/core";
import xml2js from "xml2js";
import type BNote from "../../becca/entities/bnote.js";
import noteService from "../../services/notes.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import { sanitizeHtml } from "../sanitizer.js";
const parseString = xml2js.parseString;
interface OpmlXml {
@@ -29,7 +27,7 @@ interface OpmlOutline {
outline: OpmlOutline[];
}
async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: string | Buffer, parentNote: BNote) {
async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: string | Uint8Array, parentNote: BNote) {
const xml = await new Promise<OpmlXml>((resolve, reject) => {
parseString(fileBuffer, (err: any, result: OpmlXml) => {
if (err) {
@@ -65,7 +63,7 @@ async function importOpml(taskContext: TaskContext<"importNotes">, fileBuffer: s
throw new Error(`Unrecognized OPML version ${opmlVersion}`);
}
content = sanitize.sanitizeHtml(content || "");
content = sanitizeHtml(content || "");
const { note } = noteService.createNewNote({
parentNoteId,

View File

@@ -6,10 +6,10 @@ import { dirname } from "path";
import becca from "../../becca/becca.js";
import BNote from "../../becca/entities/bnote.js";
import TaskContext from "../task_context.js";
import cls from "../cls.js";
import sql_init from "../sql_init.js";
import single from "./single.js";
import stripBom from "strip-bom";
import { getContext } from "../context.js";
const scriptDir = dirname(fileURLToPath(import.meta.url));
async function testImport(fileName: string, mimetype: string) {
@@ -20,7 +20,7 @@ async function testImport(fileName: string, mimetype: string) {
});
return new Promise<{ buffer: Buffer; importedNote: BNote }>((resolve, reject) => {
cls.init(async () => {
getContext().init(async () => {
const rootNote = becca.getNote("root");
if (!rootNote) {
reject("Missing root note.");
@@ -36,6 +36,10 @@ async function testImport(fileName: string, mimetype: string) {
},
rootNote as BNote
);
if (importedNote === null) {
reject("Import failed.");
return;
}
resolve({
buffer,
importedNote

View File

@@ -1,16 +1,17 @@
import type { NoteType } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import type BNote from "../../becca/entities/bnote.js";
import imageService from "../../services/image.js";
import noteService from "../../services/notes.js";
import { processStringOrBuffer } from "../../services/utils.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import type { File } from "./common.js";
import markdownService from "./markdown.js";
import mimeService from "./mime.js";
import importUtils from "./utils.js";
import { getNoteTitle } from "../utils/index.js";
import { sanitizeHtml } from "../sanitizer.js";
import { processStringOrBuffer } from "../utils/binary.js";
function importSingleFile(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const mime = mimeService.getMime(file.originalname) || file.mimetype;
@@ -57,7 +58,7 @@ function importFile(taskContext: TaskContext<"importNotes">, file: File, parentN
const mime = mimeService.getMime(originalName) || file.mimetype;
const { note } = noteService.createNewNote({
parentNoteId: parentNote.noteId,
title: utils.getNoteTitle(originalName, mime === "application/pdf", { mime }),
title: getNoteTitle(originalName, mime === "application/pdf", { mime }),
content: file.buffer,
isProtected: parentNote.isProtected && protectedSessionService.isProtectedSessionAvailable(),
type: "file",
@@ -72,7 +73,7 @@ function importFile(taskContext: TaskContext<"importNotes">, file: File, parentN
}
function importCodeNote(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const content = processStringOrBuffer(file.buffer);
const detectedMime = mimeService.getMime(file.originalname) || file.mimetype;
const mime = mimeService.normalizeMimeType(detectedMime);
@@ -97,7 +98,7 @@ function importCodeNote(taskContext: TaskContext<"importNotes">, file: File, par
}
function importCustomType(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote, type: NoteType, mime: string) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const content = processStringOrBuffer(file.buffer);
const { note } = noteService.createNewNote({
@@ -115,7 +116,7 @@ function importCustomType(taskContext: TaskContext<"importNotes">, file: File, p
}
function importPlainText(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const plainTextContent = processStringOrBuffer(file.buffer);
const htmlContent = convertTextToHtml(plainTextContent);
@@ -150,13 +151,13 @@ function convertTextToHtml(text: string) {
}
function importMarkdown(taskContext: TaskContext<"importNotes">, file: File, parentNote: BNote) {
const title = utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const markdownContent = processStringOrBuffer(file.buffer);
let htmlContent = markdownService.renderToHtml(markdownContent, title);
if (taskContext.data?.safeImport) {
htmlContent = sanitize.sanitizeHtml(htmlContent);
htmlContent = sanitizeHtml(htmlContent);
}
const { note } = noteService.createNewNote({
@@ -179,12 +180,12 @@ function importHtml(taskContext: TaskContext<"importNotes">, file: File, parentN
// Try to get title from HTML first, fall back to filename
// We do this before sanitization since that turns all <h1>s into <h2>
const htmlTitle = importUtils.extractHtmlTitle(content);
const title = htmlTitle || utils.getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
const title = htmlTitle || getNoteTitle(file.originalname, !!taskContext.data?.replaceUnderscoresWithSpaces);
content = importUtils.handleH1(content, title);
if (taskContext?.data?.safeImport) {
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
}
const { note } = noteService.createNewNote({

View File

@@ -1,6 +1,4 @@
"use strict";
import { unescapeHtml } from "../utils.js";
import { unescapeHtml } from "../utils";
function handleH1(content: string, title: string) {
let isFirstH1Handled = false;

View File

@@ -7,9 +7,9 @@ import zip, { removeTriliumTags } from "./zip.js";
import becca from "../../becca/becca.js";
import BNote from "../../becca/entities/bnote.js";
import TaskContext from "../task_context.js";
import cls from "../cls.js";
import sql_init from "../sql_init.js";
import { trimIndentation } from "@triliumnext/commons";
import { getContext } from "../context.js";
const scriptDir = dirname(fileURLToPath(import.meta.url));
async function testImport(fileName: string) {
@@ -19,7 +19,7 @@ async function testImport(fileName: string) {
});
return new Promise<{ importedNote: BNote; rootNote: BNote }>((resolve, reject) => {
cls.init(async () => {
getContext().init(async () => {
const rootNote = becca.getNote("root");
if (!rootNote) {
expect(rootNote).toBeTruthy();

View File

@@ -1,10 +1,6 @@
import { ALLOWED_NOTE_TYPES, type NoteType } from "@triliumnext/commons";
import { sanitize, utils } from "@triliumnext/core";
import path from "path";
import type { Stream } from "stream";
import yauzl from "yauzl";
import { basename, dirname } from "../utils/path.js";
import { getZipProvider } from "./zip_provider.js";
import becca from "../../becca/becca.js";
import BAttachment from "../../becca/entities/battachment.js";
@@ -12,16 +8,17 @@ import BAttribute from "../../becca/entities/battribute.js";
import BBranch from "../../becca/entities/bbranch.js";
import type BNote from "../../becca/entities/bnote.js";
import attributeService from "../../services/attributes.js";
import log from "../../services/log.js";
import { getLog } from "../../services/log.js";
import noteService from "../../services/notes.js";
import { newEntityId, processStringOrBuffer, unescapeHtml } from "../../services/utils.js";
import type AttributeMeta from "../meta/attribute_meta.js";
import type NoteMeta from "../meta/note_meta.js";
import { getNoteTitle, newEntityId, removeFileExtension, unescapeHtml } from "../../services/utils/index.js";
import { processStringOrBuffer } from "../../services/utils/binary.js";
import protectedSessionService from "../protected_session.js";
import type TaskContext from "../task_context.js";
import treeService from "../tree.js";
import markdownService from "./markdown.js";
import mimeService from "./mime.js";
import { AttributeMeta, NoteMeta } from "../../meta.js";
import { sanitizeHtml } from "../sanitizer.js";
interface MetaFile {
files: NoteMeta[];
@@ -31,7 +28,7 @@ interface ImportZipOpts {
preserveIds?: boolean;
}
async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Buffer, importRootNote: BNote, opts?: ImportZipOpts): Promise<BNote> {
async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Uint8Array, importRootNote: BNote, opts?: ImportZipOpts): Promise<BNote> {
/** maps from original noteId (in ZIP file) to newly generated noteId */
const noteIdMap: Record<string, string> = {};
/** type maps from original attachmentId (in ZIP file) to newly generated attachmentId */
@@ -140,7 +137,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
if (parentNoteMeta?.noteId) {
parentNoteId = parentNoteMeta.isImportRoot ? importRootNote.noteId : getNewNoteId(parentNoteMeta.noteId);
} else {
const parentPath = path.dirname(filePath);
const parentPath = dirname(filePath);
if (parentPath === ".") {
parentNoteId = importRootNote.noteId;
@@ -162,7 +159,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
// in case we lack metadata, we treat e.g. "Programming.html" and "Programming" as the same note
// (one data file, the other directory for children)
const filePathNoExt = utils.removeFileExtension(filePath);
const filePathNoExt = removeFileExtension(filePath);
if (filePathNoExt in createdPaths) {
return createdPaths[filePathNoExt];
@@ -199,7 +196,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
}
if (!attributeService.isAttributeType(attr.type)) {
log.error(`Unrecognized attribute type ${attr.type}`);
getLog().error(`Unrecognized attribute type ${attr.type}`);
continue;
}
@@ -217,8 +214,8 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
}
if (taskContext.data?.safeImport) {
attr.name = sanitize.sanitizeHtml(attr.name);
attr.value = sanitize.sanitizeHtml(attr.value);
attr.name = sanitizeHtml(attr.name);
attr.value = sanitizeHtml(attr.value);
}
attributes.push(attr);
@@ -234,7 +231,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return;
}
const noteTitle = utils.getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta);
const noteTitle = getNoteTitle(filePath, !!taskContext.data?.replaceUnderscoresWithSpaces, noteMeta);
const parentNoteId = getParentNoteId(filePath, parentNoteMeta);
if (!parentNoteId) {
@@ -269,10 +266,10 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
url = url.substr(2);
}
absUrl = path.dirname(filePath);
absUrl = dirname(filePath);
while (url.startsWith("../")) {
absUrl = path.dirname(absUrl);
absUrl = dirname(absUrl);
url = url.substr(3);
}
@@ -318,7 +315,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
});
if (taskContext.data?.safeImport) {
content = sanitize.sanitizeHtml(content);
content = sanitizeHtml(content);
}
content = content.replace(/<html.*<body[^>]*>/gis, "");
@@ -333,7 +330,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
try {
url = decodeURIComponent(url).trim();
} catch (e: any) {
log.error(`Cannot parse image URL '${url}', keeping original. Error: ${e.message}.`);
getLog().error(`Cannot parse image URL '${url}', keeping original. Error: ${e.message}.`);
return `src="${url}"`;
}
@@ -344,9 +341,9 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
const target = getEntityIdFromRelativeUrl(url, filePath);
if (target.attachmentId) {
return `src="api/attachments/${target.attachmentId}/image/${path.basename(url)}"`;
return `src="api/attachments/${target.attachmentId}/image/${basename(url)}"`;
} else if (target.noteId) {
return `src="api/images/${target.noteId}/${path.basename(url)}"`;
return `src="api/images/${target.noteId}/${basename(url)}"`;
}
return match;
@@ -356,7 +353,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
try {
url = decodeURIComponent(url).trim();
} catch (e: any) {
log.error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`);
getLog().error(`Cannot parse link URL '${url}', keeping original. Error: ${e.message}.`);
return `href="${url}"`;
}
@@ -392,7 +389,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return content;
}
function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Buffer, noteTitle: string, filePath: string) {
function processNoteContent(noteMeta: NoteMeta | undefined, type: string, mime: string, content: string | Uint8Array, noteTitle: string, filePath: string) {
if ((noteMeta?.format === "markdown" || (!noteMeta && taskContext.data?.textImportedAsText && ["text/markdown", "text/x-markdown", "text/mdx"].includes(mime))) && typeof content === "string") {
content = markdownService.renderToHtml(content, noteTitle);
}
@@ -414,7 +411,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
return content;
}
function saveNote(filePath: string, content: string | Buffer) {
function saveNote(filePath: string, content: string | Uint8Array) {
const { parentNoteMeta, noteMeta, attachmentMeta } = getMeta(filePath);
if (noteMeta?.noImport) {
@@ -467,7 +464,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
content = processStringOrBuffer(content);
}
const noteTitle = utils.getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta);
const noteTitle = getNoteTitle(filePath, taskContext.data?.replaceUnderscoresWithSpaces || false, noteMeta);
content = processNoteContent(noteMeta, type, mime, content, noteTitle || "", filePath);
@@ -551,46 +548,42 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
noteId,
type: "label",
name: "originalFileName",
value: path.basename(filePath)
value: basename(filePath)
});
}
}
// we're running two passes in order to obtain critical information first (meta file and root)
const topLevelItems = new Set<string>();
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
const zipProvider = getZipProvider();
await zipProvider.readZipFile(fileBuffer, async (entry, readContent) => {
const filePath = normalizeFilePath(entry.fileName);
// make sure that the meta file is loaded before the rest of the files is processed.
if (filePath === "!!!meta.json") {
const content = await readContent(zipfile, entry);
metaFile = JSON.parse(content.toString("utf-8"));
const content = await readContent();
metaFile = JSON.parse(new TextDecoder("utf-8").decode(content));
}
// determine the root of the .zip (i.e. if it has only one top-level folder then the root is that folder, or the root of the archive if there are multiple top-level folders).
const firstSlash = filePath.indexOf("/");
const topLevelPath = (firstSlash !== -1 ? filePath.substring(0, firstSlash) : filePath);
topLevelItems.add(topLevelPath);
zipfile.readEntry();
});
topLevelPath = (topLevelItems.size > 1 ? "" : topLevelItems.values().next().value ?? "");
await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
await zipProvider.readZipFile(fileBuffer, async (entry, readContent) => {
const filePath = normalizeFilePath(entry.fileName);
if (/\/$/.test(entry.fileName)) {
saveDirectory(filePath);
} else if (filePath !== "!!!meta.json") {
const content = await readContent(zipfile, entry);
saveNote(filePath, content);
saveNote(filePath, await readContent());
}
taskContext.increaseProgressCount();
zipfile.readEntry();
});
for (const noteId of createdNoteIds) {
@@ -613,7 +606,7 @@ async function importZip(taskContext: TaskContext<"importNotes">, fileBuffer: Bu
if (attr.type !== "relation" || attr.value in becca.notes) {
new BAttribute(attr).save();
} else {
log.info(`Relation not imported since the target note doesn't exist: ${JSON.stringify(attr)}`);
getLog().info(`Relation not imported since the target note doesn't exist: ${JSON.stringify(attr)}`);
}
}
@@ -639,43 +632,6 @@ function normalizeFilePath(filePath: string): string {
return filePath;
}
function streamToBuffer(stream: Stream): Promise<Buffer> {
const chunks: Uint8Array[] = [];
stream.on("data", (chunk) => chunks.push(chunk));
return new Promise((res, rej) => stream.on("end", () => res(Buffer.concat(chunks))));
}
export function readContent(zipfile: yauzl.ZipFile, entry: yauzl.Entry): Promise<Buffer> {
return new Promise((res, rej) => {
zipfile.openReadStream(entry, (err, readStream) => {
if (err) rej(err);
if (!readStream) throw new Error("Unable to read content.");
streamToBuffer(readStream).then(res);
});
});
}
export function readZipFile(buffer: Buffer, processEntryCallback: (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => Promise<void>) {
return new Promise<void>((res, rej) => {
yauzl.fromBuffer(buffer, { lazyEntries: true, validateEntrySizes: false }, (err, zipfile) => {
if (err) rej(err);
if (!zipfile) throw new Error("Unable to read zip file.");
zipfile.readEntry();
zipfile.on("entry", async (entry) => {
try {
await processEntryCallback(zipfile, entry);
} catch (e) {
rej(e);
}
});
zipfile.on("end", res);
});
});
}
function resolveNoteType(type: string | undefined): NoteType {
// BC for ZIPs created in Trilium 0.57 and older
switch (type) {

View File

@@ -0,0 +1,25 @@
export interface ZipEntry {
    fileName: string;
}

export interface ZipProvider {
    /**
     * Walks every entry in a ZIP buffer, invoking `processEntry` once per entry.
     * The `readContent()` callback lazily reads that entry's raw bytes on demand.
     */
    readZipFile(
        buffer: Uint8Array,
        processEntry: (entry: ZipEntry, readContent: () => Promise<Uint8Array>) => Promise<void>
    ): Promise<void>;
}

// The concrete ZIP implementation is injected by the host environment at
// startup; `null` until {@link initZipProvider} has been called.
let activeProvider: ZipProvider | null = null;

/** Registers the ZIP implementation returned by {@link getZipProvider}. */
export function initZipProvider(provider: ZipProvider) {
    activeProvider = provider;
}

/** Returns the registered ZIP implementation; throws if none was injected yet. */
export function getZipProvider(): ZipProvider {
    if (activeProvider === null) {
        throw new Error("ZipProvider not initialized.");
    }
    return activeProvider;
}

View File

@@ -312,6 +312,26 @@ export class SqlService {
}
}
/**
 * Async-safe transaction wrapper for use in Web Workers and other single-threaded
 * async contexts. Issues manual BEGIN/COMMIT/ROLLBACK statements because the
 * synchronous `transactional()` cannot `await` promises inside its callback.
 *
 * NOTE(review): assumes no other work is interleaved on this connection while
 * `func` is awaited (single-threaded async context only) — confirm for new call sites.
 *
 * @param func the async work to run inside the transaction.
 * @returns the value resolved by `func`, after the transaction has been committed.
 * @throws rethrows whatever `func` (or the COMMIT itself) throws, after issuing a ROLLBACK.
 */
async transactionalAsync<T>(func: () => Promise<T>): Promise<T> {
    // BEGIN IMMEDIATE acquires the write lock up front, so a competing writer
    // makes us fail fast here rather than at the first write statement.
    this.execute("BEGIN IMMEDIATE");
    try {
        const result = await func();
        this.execute("COMMIT");
        // Only fire the commit hook once the connection reports no open
        // transaction — presumably mirrors the synchronous transactional() wrapper.
        if (!this.dbConnection.inTransaction) {
            this.params.onTransactionCommit();
        }
        return result;
    } catch (e) {
        // Roll back whatever partial work happened, notify, and propagate.
        this.execute("ROLLBACK");
        this.params.onTransactionRollback();
        throw e;
    }
}
fillParamList(paramIds: string[] | Set<string>, truncate = true) {
if ("length" in paramIds && paramIds.length === 0) {
return;

View File

@@ -15,11 +15,16 @@ import migrationService from "./migration";
export const dbReady = deferred<void>();
let schema: string;
let getDemoArchive: (() => Promise<Uint8Array | null>) | null = null;
export function initSchema(schemaStr: string) {
schema = schemaStr;
}
export function initDemoArchive(fn: () => Promise<Uint8Array | null>) {
getDemoArchive = fn;
}
function schemaExists() {
return !!getSql().getValue(/*sql*/`SELECT name FROM sqlite_master
WHERE type = 'table' AND name = 'options'`);
@@ -177,21 +182,23 @@ async function createInitialDatabase(skipDemoDb?: boolean) {
});
// Import demo content.
log.info("Importing demo content...");
if (!skipDemoDb && getDemoArchive) {
log.info("Importing demo content...");
const demoFile = await getDemoArchive();
if (demoFile) {
const { default: zipImportService } = await import("./import/zip.js");
const dummyTaskContext = new TaskContext("no-progress-reporting", "importNotes", null);
await zipImportService.importZip(dummyTaskContext, demoFile, rootNote);
}
}
const dummyTaskContext = new TaskContext("no-progress-reporting", "importNotes", null);
// if (demoFile) {
// await zipImportService.importZip(dummyTaskContext, demoFile, rootNote);
// }
// Post-demo.
// Post-demo: pick the first visible (non-system) child of root as the start note.
// System notes have IDs starting with "_" and should not be navigated to on startup.
// Falls back to "root" if no visible child exists (e.g. empty database).
sql.transactional(() => {
// this needs to happen after ZIP import,
// the previous solution was to move option initialization here, but then the important parts of initialization
// are not all in one transaction (because ZIP import is async and thus not transactional)
const startNoteId = sql.getValue("SELECT noteId FROM branches WHERE parentNoteId = 'root' AND isDeleted = 0 ORDER BY notePosition");
const startNoteId = sql.getValue<string | null>(
"SELECT noteId FROM branches WHERE parentNoteId = 'root' AND isDeleted = 0 AND substr(noteId, 1, 1) != '_' ORDER BY notePosition"
) ?? "root";
optionService.setOption(
"openNoteContexts",

View File

@@ -1,3 +1,6 @@
import chardet from "chardet";
import stripBom from "strip-bom";
const utf8Decoder = new TextDecoder("utf-8");
const utf8Encoder = new TextEncoder();
@@ -59,3 +62,32 @@ export function wrapStringOrBuffer(stringOrBuffer: string | Uint8Array) {
return stringOrBuffer;
}
}
/**
 * Converts file content that may arrive as raw bytes into a string.
 *
 * Buffers are scanned with `chardet` for a supported encoding and decoded
 * accordingly (UTF-16LE, otherwise UTF-8). Strings are returned unchanged,
 * and nullish values yield an empty string.
 *
 * NOTE(review): any detected encoding other than UTF-16LE (including UTF-16BE)
 * falls into the UTF-8 branch — confirm this is acceptable for the import callers.
 *
 * @param data the string or buffer to process.
 * @returns the decoded string representation of the buffer, or the same string
 *          if it's already a string.
 */
export function processStringOrBuffer(data: string | Uint8Array | null) {
    // Falsy covers null/undefined and "" alike; an empty Uint8Array is truthy
    // but decodes to "" below anyway.
    if (!data) {
        return "";
    }
    if (typeof data === "string") {
        return data;
    }
    const detectedEncoding = chardet.detect(data);
    switch (detectedEncoding) {
        case "UTF-16LE":
            // TextDecoder already drops a leading BOM by default; stripBom is a
            // belt-and-braces guard against a stray U+FEFF at the start.
            return stripBom(new TextDecoder("utf-16le").decode(data));
        case "UTF-8":
        default:
            return utf8Decoder.decode(data);
    }
}

View File

@@ -20,6 +20,11 @@ export function hash(text: string) {
return encodeBase64(getCrypto().createHash("sha1", text.normalize()));
}
/**
 * Computes the MD5 digest of `content` and returns it as a lowercase
 * hex string (two characters per byte, zero-padded).
 */
export function md5(content: string | Uint8Array) {
    const digest = getCrypto().createHash("md5", content);
    let hex = "";
    for (const byte of digest) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
export function isStringNote(type: string | undefined, mime: string) {
return (type && STRING_NOTE_TYPES.has(type)) || mime.startsWith("text/") || STRING_MIME_TYPES.has(mime);
}

View File

@@ -16,3 +16,12 @@ export function basename(filePath: string): string {
const lastSlash = Math.max(filePath.lastIndexOf("/"), filePath.lastIndexOf("\\"));
return filePath.substring(lastSlash + 1);
}
/**
 * Returns the directory part of a file path, or "." if there is none.
 * Backslashes are treated as separators and normalized to forward slashes
 * in the result.
 */
export function dirname(filePath: string): string {
    const unified = filePath.replace(/\\/g, "/");
    const cut = unified.lastIndexOf("/");
    switch (cut) {
        case -1:
            // No separator at all: the path is a bare file name.
            return ".";
        case 0:
            // Separator only at the start: the parent is the root itself.
            return "/";
        default:
            return unified.slice(0, cut);
    }
}

View File

@@ -3,6 +3,7 @@
"compilerOptions": {
"module": "ESNext",
"moduleResolution": "bundler",
"target": "ES2020",
"rootDir": "src",
"outDir": "dist",
"tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo",

View File

@@ -2,6 +2,9 @@
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "./out-tsc/vitest",
"module": "ESNext",
"moduleResolution": "bundler",
"target": "ES2020",
"types": [
"vitest"
],

33
pnpm-lock.yaml generated
View File

@@ -489,6 +489,9 @@ importers:
draggabilly:
specifier: 3.0.0
version: 3.0.0
fflate:
specifier: 0.8.2
version: 0.8.2
force-graph:
specifier: 1.51.2
version: 1.51.2
@@ -507,6 +510,9 @@ importers:
jquery.fancytree:
specifier: 2.38.5
version: 2.38.5(jquery@4.0.0)
js-md5:
specifier: 0.8.3
version: 0.8.3
js-sha1:
specifier: 0.7.0
version: 0.7.0
@@ -878,9 +884,6 @@ importers:
bootstrap:
specifier: 5.3.8
version: 5.3.8(@popperjs/core@2.11.8)
chardet:
specifier: 2.1.1
version: 2.1.1
cheerio:
specifier: 1.2.0
version: 1.2.0
@@ -1001,9 +1004,6 @@ importers:
stream-throttle:
specifier: 0.1.3
version: 0.1.3
strip-bom:
specifier: 5.0.0
version: 5.0.0
striptags:
specifier: 3.2.0
version: 3.2.0
@@ -1707,6 +1707,9 @@ importers:
async-mutex:
specifier: 0.5.0
version: 0.5.0
chardet:
specifier: 2.1.1
version: 2.1.1
escape-html:
specifier: 1.0.3
version: 1.0.3
@@ -1725,6 +1728,9 @@ importers:
sanitize-html:
specifier: 2.17.2
version: 2.17.2
strip-bom:
specifier: 5.0.0
version: 5.0.0
unescape:
specifier: 1.0.1
version: 1.0.1
@@ -11173,6 +11179,9 @@ packages:
resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==}
engines: {node: '>=0.10.0'}
js-md5@0.8.3:
resolution: {integrity: sha512-qR0HB5uP6wCuRMrWPTrkMaev7MJZwJuuw4fnwAzRgP4J4/F8RwtodOKpGp4XpqsLBFzzgqIO42efFAyz2Et6KQ==}
js-sha1@0.7.0:
resolution: {integrity: sha512-oQZ1Mo7440BfLSv9TX87VNEyU52pXPVG19F9PL3gTgNt0tVxlZ8F4O6yze3CLuLx28TxotxvlyepCNaaV0ZjMw==}
@@ -17625,6 +17634,8 @@ snapshots:
'@ckeditor/ckeditor5-widget': 47.6.1
ckeditor5: 47.6.1
es-toolkit: 1.39.5
transitivePeerDependencies:
- supports-color
'@ckeditor/ckeditor5-import-word@47.6.1':
dependencies:
@@ -17648,6 +17659,8 @@ snapshots:
'@ckeditor/ckeditor5-ui': 47.6.1
'@ckeditor/ckeditor5-utils': 47.6.1
ckeditor5: 47.6.1
transitivePeerDependencies:
- supports-color
'@ckeditor/ckeditor5-inspector@5.0.0': {}
@@ -17658,6 +17671,8 @@ snapshots:
'@ckeditor/ckeditor5-ui': 47.6.1
'@ckeditor/ckeditor5-utils': 47.6.1
ckeditor5: 47.6.1
transitivePeerDependencies:
- supports-color
'@ckeditor/ckeditor5-line-height@47.6.1':
dependencies:
@@ -17682,6 +17697,8 @@ snapshots:
'@ckeditor/ckeditor5-widget': 47.6.1
ckeditor5: 47.6.1
es-toolkit: 1.39.5
transitivePeerDependencies:
- supports-color
'@ckeditor/ckeditor5-list-multi-level@47.6.1':
dependencies:
@@ -17752,8 +17769,6 @@ snapshots:
'@ckeditor/ckeditor5-utils': 47.6.1
ckeditor5: 47.6.1
es-toolkit: 1.39.5
transitivePeerDependencies:
- supports-color
'@ckeditor/ckeditor5-merge-fields@47.6.1':
dependencies:
@@ -29147,6 +29162,8 @@ snapshots:
js-levenshtein@1.1.6: {}
js-md5@0.8.3: {}
js-sha1@0.7.0: {}
js-sha256@0.11.1: {}

View File

@@ -26,14 +26,24 @@ const filtered = lines.filter(
// Number each unique error line and keep its continuation lines (indented
// context for multi-line errors). Duplicate error lines are dropped together
// with their continuation lines, tracked via `skipContinuation`.
let errorIndex = 0;
const numbered: string[] = [];
const seen = new Set<string>();
let skipContinuation = false;

for (const line of filtered) {
    if (ERROR_LINE_PATTERN.test(line)) {
        if (seen.has(line)) {
            // Already emitted this error: also suppress the context lines
            // that follow it.
            skipContinuation = true;
            continue;
        }

        seen.add(line);
        skipContinuation = false;
        errorIndex++;
        numbered.push(`[${errorIndex}] ${line}`);
    } else if (line.trim()) {
        // Continuation line: emit it exactly once, and only when its parent
        // error line was kept. (Previously it was pushed unconditionally AND
        // again under the guard, duplicating every kept continuation line.)
        if (!skipContinuation) {
            numbered.push(line);
        }
    }
}