Merge branch 'master' into next61

# Conflicts:
#	package-lock.json
#	src/public/app/services/note_content_renderer.js
#	src/public/app/widgets/note_tree.js
#	src/routes/routes.js
#	src/services/consistency_checks.js
#	src/services/notes.js
#	src/services/task_context.js
This commit is contained in:
zadam
2023-07-10 18:20:36 +02:00
41 changed files with 253 additions and 192 deletions

View File

@@ -395,7 +395,7 @@ class ConsistencyChecks {
({noteId, isProtected, type, mime}) => {
if (this.autoFix) {
// it might be possible that the blob is not available only because of the interrupted
// sync, and it will come later. It's therefore important to guarantee that this artifical
// sync, and it will come later. It's therefore important to guarantee that this artificial
// record won't overwrite the real one coming from the sync.
const fakeDate = "2000-01-01 00:00:00Z";

View File

@@ -57,5 +57,7 @@ function sanitize(dirtyHtml) {
module.exports = {
sanitize,
sanitizeUrl
sanitizeUrl: url => {
return sanitizeUrl(url).trim();
}
};

View File

@@ -83,7 +83,7 @@ const defaultOptions = [
{ name: 'compressImages', value: 'true', isSynced: true },
{ name: 'downloadImagesAutomatically', value: 'true', isSynced: true },
{ name: 'minTocHeadings', value: '5', isSynced: true },
{ name: 'highlightedText', value: '["bold","italic","underline","color","bgColor"]', isSynced: true },
{ name: 'highlightsList', value: '["bold","italic","underline","color","bgColor"]', isSynced: true },
{ name: 'checkForUpdates', value: 'true', isSynced: true },
{ name: 'disableTray', value: 'false', isSynced: false },
{ name: 'eraseUnusedAttachmentsAfterSeconds', value: '2592000', isSynced: true },

View File

@@ -55,7 +55,7 @@ ${bundle.script}\r
}
/**
* THIS METHOD CANT BE ASYNC, OTHERWISE TRANSACTION WRAPPER WON'T BE EFFECTIVE AND WE WILL BE LOSING THE
* THIS METHOD CAN'T BE ASYNC, OTHERWISE TRANSACTION WRAPPER WON'T BE EFFECTIVE AND WE WILL BE LOSING THE
* ENTITY CHANGES IN CLS.
*
* This method preserves frontend startNode - that's why we start execution from currentNote and override

View File

@@ -19,20 +19,22 @@ class NoteFlatTextExp extends Expression {
/**
* @param {BNote} note
* @param {string[]} tokens
* @param {string[]} path
* @param {string[]} remainingTokens - tokens still needed to be found in the path towards root
* @param {string[]} takenPath - path taken so far from the candidate note towards the root.
* It contains the suffix fragment of the full note path.
*/
const searchDownThePath = (note, tokens, path) => {
if (tokens.length === 0) {
const retPath = this.getNotePath(note, path);
const searchPathTowardsRoot = (note, remainingTokens, takenPath) => {
if (remainingTokens.length === 0) {
// we're done, just build the result
const resultPath = this.getNotePath(note, takenPath);
if (retPath) {
const noteId = retPath[retPath.length - 1];
if (resultPath) {
const noteId = resultPath[resultPath.length - 1];
if (!resultNoteSet.hasNoteId(noteId)) {
// we could get here from multiple paths, the first one wins because the paths
// are sorted by importance
executionContext.noteIdToNotePath[noteId] = retPath;
executionContext.noteIdToNotePath[noteId] = resultPath;
resultNoteSet.add(becca.notes[noteId]);
}
@@ -42,22 +44,23 @@ class NoteFlatTextExp extends Expression {
}
if (note.parents.length === 0 || note.noteId === 'root') {
// we've reached root, but there are still remaining tokens -> this candidate note produced no result
return;
}
const foundAttrTokens = [];
for (const token of tokens) {
for (const token of remainingTokens) {
if (note.type.includes(token) || note.mime.includes(token)) {
foundAttrTokens.push(token);
}
}
for (const attribute of note.ownedAttributes) {
for (const attribute of note.getOwnedAttributes()) {
const normalizedName = utils.normalize(attribute.name);
const normalizedValue = utils.normalize(attribute.value);
for (const token of tokens) {
for (const token of remainingTokens) {
if (normalizedName.includes(token) || normalizedValue.includes(token)) {
foundAttrTokens.push(token);
}
@@ -68,19 +71,19 @@ class NoteFlatTextExp extends Expression {
const title = utils.normalize(beccaService.getNoteTitle(note.noteId, parentNote.noteId));
const foundTokens = foundAttrTokens.slice();
for (const token of tokens) {
for (const token of remainingTokens) {
if (title.includes(token)) {
foundTokens.push(token);
}
}
if (foundTokens.length > 0) {
const remainingTokens = tokens.filter(token => !foundTokens.includes(token));
const newRemainingTokens = remainingTokens.filter(token => !foundTokens.includes(token));
searchDownThePath(parentNote, remainingTokens, [...path, note.noteId]);
searchPathTowardsRoot(parentNote, newRemainingTokens, [note.noteId, ...takenPath]);
}
else {
searchDownThePath(parentNote, tokens, [...path, note.noteId]);
searchPathTowardsRoot(parentNote, remainingTokens, [note.noteId, ...takenPath]);
}
}
}
@@ -90,7 +93,7 @@ class NoteFlatTextExp extends Expression {
for (const note of candidateNotes) {
// autocomplete should be able to find notes by their noteIds as well (only leafs)
if (this.tokens.length === 1 && note.noteId.toLowerCase() === this.tokens[0]) {
searchDownThePath(note, [], []);
searchPathTowardsRoot(note, [], [note.noteId]);
continue;
}
@@ -123,7 +126,7 @@ class NoteFlatTextExp extends Expression {
if (foundTokens.length > 0) {
const remainingTokens = this.tokens.filter(token => !foundTokens.includes(token));
searchDownThePath(parentNote, remainingTokens, [note.noteId]);
searchPathTowardsRoot(parentNote, remainingTokens, [note.noteId]);
}
}
}
@@ -131,14 +134,22 @@ class NoteFlatTextExp extends Expression {
return resultNoteSet;
}
getNotePath(note, path) {
if (path.length === 0) {
/**
* @param {BNote} note
* @param {string[]} takenPath
* @returns {string[]}
*/
getNotePath(note, takenPath) {
if (takenPath.length === 0) {
throw new Error("Path is not expected to be empty.");
} else if (takenPath.length === 1 && takenPath[0] === note.noteId) {
return note.getBestNotePath();
} else {
const closestNoteId = path[0];
const closestNoteBestNotePath = becca.getNote(closestNoteId).getBestNotePath();
// this note is the closest to root containing the last matching token(s), thus completing the requirements
// what's in this note's predecessors does not matter, thus we'll choose the best note path
const topMostMatchingTokenNotePath = becca.getNote(takenPath[0]).getBestNotePath();
return [...closestNoteBestNotePath, ...path.slice(1)];
return [...topMostMatchingTokenNotePath, ...takenPath.slice(1)];
}
}

View File

@@ -10,7 +10,6 @@ const becca = require('../../../becca/becca');
const beccaService = require('../../../becca/becca_service');
const utils = require('../../utils');
const log = require('../../log');
const scriptService = require("../../script");
const hoistedNoteService = require("../../hoisted_note");
function searchFromNote(note) {
@@ -73,6 +72,7 @@ function searchFromRelation(note, relationName) {
return [];
}
const scriptService = require("../../script"); // to avoid circular dependency
const result = scriptService.executeNote(scriptNote, {originEntity: note});
if (!Array.isArray(result)) {

View File

@@ -13,7 +13,7 @@ class TaskContext {
this.noteDeletionHandlerTriggered = false;
// progressCount is meant to represent just some progress - to indicate the task is not stuck
this.progressCount = -1; // we're incrementing immediatelly
this.progressCount = -1; // we're incrementing immediately
this.lastSentCountTs = 0; // 0 will guarantee the first message will be sent
// just the fact this has been initialized is a progress which should be sent to clients