Mirror of https://github.com/zadam/trilium.git
Merge branch 'beta'
# Conflicts:
#	docs/backend_api/BAttachment.html
#	docs/backend_api/BNote.html
#	docs/backend_api/becca_entities_bnote.js.html
#	docs/frontend_api/FAttribute.html
#	docs/frontend_api/FBranch.html
#	docs/frontend_api/FNote.html
#	docs/frontend_api/FrontendScriptApi.html
#	docs/frontend_api/entities_fattachment.js.html
#	docs/frontend_api/entities_fattribute.js.html
#	docs/frontend_api/entities_fblob.js.html
#	docs/frontend_api/entities_fbranch.js.html
#	docs/frontend_api/entities_fnote.js.html
#	docs/frontend_api/global.html
#	docs/frontend_api/index.html
#	docs/frontend_api/services_frontend_script_api.js.html
#	package-lock.json

@@ -1 +1 @@
-module.exports = { buildDate:"2023-09-06T23:57:29+02:00", buildRevision: "6fa9d996e84f87fcb73c3388a5170affd2c2f7cc" };
+module.exports = { buildDate:"2023-09-21T23:38:18+02:00", buildRevision: "79e5e3b65ff613cdb81e2afaa832037ccf06d7b8" };

@@ -12,6 +12,7 @@ const BBranch = require('../becca/entities/bbranch');
 const revisionService = require('./revisions');
 const becca = require("../becca/becca");
 const utils = require("../services/utils");
+const eraseService = require("../services/erase");
 const {sanitizeAttributeName} = require("./sanitize_attribute_name");
 const noteTypes = require("../services/note_types").getNoteTypeNames();

@@ -440,7 +441,7 @@ class ConsistencyChecks {
         this.findAndFixIssues(`
                     SELECT notes.noteId, notes.type, notes.mime
                     FROM notes
-                      JOIN blobs USING (blobId)
+                      JOIN blobs USING (blobId)
                     WHERE isDeleted = 0
                       AND isProtected = 0
                       AND content IS NULL`,

@@ -460,19 +461,36 @@ class ConsistencyChecks {
         }

         this.findAndFixIssues(`
-                    SELECT revisions.revisionId
+                    SELECT revisions.revisionId, blobs.blobId
                     FROM revisions
                       LEFT JOIN blobs USING (blobId)
                     WHERE blobs.blobId IS NULL`,
-            ({revisionId}) => {
+            ({revisionId, blobId}) => {
                 if (this.autoFix) {
                     revisionService.eraseRevisions([revisionId]);

                     this.reloadNeeded = true;

-                    logFix(`Note revision content '${revisionId}' was set to erased since its content did not exist.`);
+                    logFix(`Note revision '${revisionId}' was erased since the referenced blob '${blobId}' did not exist.`);
                 } else {
-                    logError(`Note revision content '${revisionId}' does not exist`);
+                    logError(`Note revision '${revisionId}' blob '${blobId}' does not exist`);
                 }
             });
+
+        this.findAndFixIssues(`
+                    SELECT attachments.attachmentId, blobs.blobId
+                    FROM attachments
+                      LEFT JOIN blobs USING (blobId)
+                    WHERE blobs.blobId IS NULL`,
+            ({attachmentId, blobId}) => {
+                if (this.autoFix) {
+                    eraseService.eraseAttachments([attachmentId]);
+
+                    this.reloadNeeded = true;
+
+                    logFix(`Attachment '${attachmentId}' was erased since the referenced blob '${blobId}' did not exist.`);
+                } else {
+                    logError(`Attachment '${attachmentId}' blob '${blobId}' does not exist`);
+                }
+            });

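Both new checks use the same orphan-detection shape: a LEFT JOIN keeps every row of the left-hand table and fills the blobs columns with NULL where no matching blob exists, so filtering on blobs.blobId IS NULL returns exactly the rows whose blobId points to a missing blob. A standalone sketch of the same query (illustrative only, not part of the diff; it assumes the sql helper exposes a getColumn-style method):

// illustrative sketch - findAndFixIssues() above does the actual fixing/logging
const orphanedAttachmentIds = sql.getColumn(`
    SELECT attachments.attachmentId
    FROM attachments
    LEFT JOIN blobs USING (blobId)
    WHERE blobs.blobId IS NULL`);
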
@@ -183,5 +183,6 @@ module.exports = {
     eraseDeletedNotesNow,
     eraseUnusedAttachmentsNow,
     eraseNotesWithDeleteId,
-    eraseUnusedBlobs
+    eraseUnusedBlobs,
+    eraseAttachments
 };

@@ -895,6 +895,10 @@ async function asyncPostProcessContent(note, content) {

 // all keys should be replaced by the corresponding values
 function replaceByMap(str, mapObj) {
+    if (!mapObj) {
+        return str;
+    }
+
     const re = new RegExp(Object.keys(mapObj).join("|"),"g");

     return str.replace(re, matched => mapObj[matched]);

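A quick usage note on replaceByMap (illustrative values, not from the diff): the map keys are joined into a single alternation regex and each match is swapped for its value, so keys containing regex metacharacters would need escaping before being passed in.

// replaceByMap("Hello NAME, welcome to PLACE", { NAME: "Ada", PLACE: "Trilium" })
//   === "Hello Ada, welcome to Trilium"
// with the new guard, replaceByMap("Hello NAME", null) returns "Hello NAME" instead of throwing
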
@@ -51,7 +51,7 @@ function eraseRevisions(revisionIdsToErase) {
         return;
     }

-    log.info(`Removing note revisions: ${JSON.stringify(revisionIdsToErase)}`);
+    log.info(`Removing revisions: ${JSON.stringify(revisionIdsToErase)}`);

     sql.executeMany(`DELETE FROM revisions WHERE revisionId IN (???)`, revisionIdsToErase);
     sql.executeMany(`UPDATE entity_changes SET isErased = 1, utcDateChanged = '${dateUtils.utcNowDateTime()}' WHERE entityName = 'revisions' AND entityId IN (???)`, revisionIdsToErase);

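The `(???)` token stands in for the array argument passed to executeMany. A hedged sketch of how such a placeholder can be expanded (an assumption for illustration only, not the project's actual implementation):

// hypothetical helper, for illustration only
function expandInClause(query, values) {
    const placeholders = values.map(() => "?").join(", ");   // one "?" per array element
    return query.replace("???", placeholders);
}

// expandInClause("DELETE FROM revisions WHERE revisionId IN (???)", ["r1", "r2"])
//   === "DELETE FROM revisions WHERE revisionId IN (?, ?)"
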
@@ -93,7 +93,7 @@ async function setupSyncFromSyncServer(syncServerHost, syncProxy, password) {
         return { result: 'success' };
     }
     catch (e) {
-        log.error(`Sync failed: ${e.message}`);
+        log.error(`Sync failed: '${e.message}', stack: ${e.stack}`);

         return {
             result: 'failure',

@@ -26,7 +26,7 @@ const LOG_ALL_QUERIES = false;
 });

 function insert(tableName, rec, replace = false) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't insert empty object into table ${tableName}`);
         return;

@@ -53,7 +53,7 @@ function replace(tableName, rec) {
 }

 function upsert(tableName, primaryKey, rec) {
-    const keys = Object.keys(rec);
+    const keys = Object.keys(rec || {});
     if (keys.length === 0) {
         log.error(`Can't upsert empty object into table ${tableName}`);
         return;

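The `rec || {}` guard in both insert() and upsert() matters because Object.keys throws on null/undefined; with the guard, a missing record object falls into the existing empty-object branch (log an error and return) instead of crashing the caller:

// Object.keys(null)       -> TypeError: Cannot convert undefined or null to object
// Object.keys(null || {}) -> []  (so insert()/upsert() just log an error and return)
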
@@ -71,8 +71,7 @@ async function sync() {
         };
     }
     else {
-        log.info(`sync failed: ${e.message}
-stack: ${e.stack}`);
+        log.info(`Sync failed: '${e.message}', stack: ${e.stack}`);

         ws.syncFailed();

@@ -127,8 +126,6 @@ async function doLogin() {
 }

 async function pullChanges(syncContext) {
-    let atLeastOnePullApplied = false;
-
     while (true) {
         const lastSyncedPull = getLastSyncedPull();
         const logMarkerId = utils.randomString(10); // to easily pair sync events between client and server logs

@@ -144,22 +141,7 @@ async function pullChanges(syncContext) {
         const pulledDate = Date.now();

         sql.transactional(() => {
-            for (const {entityChange, entity} of entityChanges) {
-                const changeAppliedAlready = entityChange.changeId
-                    && !!sql.getValue("SELECT 1 FROM entity_changes WHERE changeId = ?", [entityChange.changeId]);
-
-                if (changeAppliedAlready) {
-                    continue;
-                }
-
-                if (!atLeastOnePullApplied) { // send only for first
-                    ws.syncPullInProgress();
-
-                    atLeastOnePullApplied = true;
-                }
-
-                syncUpdateService.updateEntity(entityChange, entity, syncContext.instanceId);
-            }
+            syncUpdateService.updateEntities(entityChanges, syncContext.instanceId);

             if (lastSyncedPull !== lastEntityChangeId) {
                 setLastSyncedPull(lastEntityChangeId);

@@ -3,15 +3,51 @@ const log = require('./log');
 const entityChangesService = require('./entity_changes');
 const eventService = require('./events');
 const entityConstructor = require("../becca/entity_constructor");
+const ws = require("./ws");

-function updateEntity(remoteEC, remoteEntityRow, instanceId) {
+function updateEntities(entityChanges, instanceId) {
+    if (entityChanges.length === 0) {
+        return;
+    }
+
+    let atLeastOnePullApplied = false;
+    const updateContext = {
+        updated: {},
+        alreadyUpdated: 0,
+        erased: 0,
+        alreadyErased: 0
+    };
+
+    for (const {entityChange, entity} of entityChanges) {
+        const changeAppliedAlready = entityChange.changeId
+            && !!sql.getValue("SELECT 1 FROM entity_changes WHERE changeId = ?", [entityChange.changeId]);
+
+        if (changeAppliedAlready) {
+            updateContext.alreadyUpdated++;
+
+            continue;
+        }
+
+        if (!atLeastOnePullApplied) { // avoid spamming and send only for first
+            ws.syncPullInProgress();
+
+            atLeastOnePullApplied = true;
+        }
+
+        updateEntity(entityChange, entity, instanceId, updateContext);
+    }
+
+    logUpdateContext(updateContext);
+}
+
+function updateEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
     if (!remoteEntityRow && remoteEC.entityName === 'options') {
         return; // can be undefined for options with isSynced=false
     }

     const updated = remoteEC.entityName === 'note_reordering'
         ? updateNoteReordering(remoteEC, remoteEntityRow, instanceId)
-        : updateNormalEntity(remoteEC, remoteEntityRow, instanceId);
+        : updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext);

     if (updated) {
         if (remoteEntityRow?.isDeleted) {

@@ -29,11 +65,12 @@ function updateEntity(remoteEC, remoteEntityRow, instanceId) {
     }
 }

-function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
+function updateNormalEntity(remoteEC, remoteEntityRow, instanceId, updateContext) {
     const localEC = sql.getRow(`SELECT * FROM entity_changes WHERE entityName = ? AND entityId = ?`, [remoteEC.entityName, remoteEC.entityId]);

     if (!localEC?.isErased && remoteEC.isErased) {
         eraseEntity(remoteEC, instanceId);
+        updateContext.erased++;

         return true;
     } else if (localEC?.isErased && !remoteEC.isErased) {

@@ -42,10 +79,15 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {

         return false;
     } else if (localEC?.isErased && remoteEC.isErased) {
+        updateContext.alreadyErased++;
         return false;
     }

     if (!localEC || localEC.utcDateChanged <= remoteEC.utcDateChanged) {
+        if (!remoteEntityRow) {
+            throw new Error(`Empty entity row for: ${JSON.stringify(remoteEC)}`);
+        }
+
         if (remoteEC.entityName === 'blobs' && remoteEntityRow.content !== null) {
             // we always use a Buffer object which is different from normal saving - there we use a simple string type for
             // "string notes". The problem is that in general, it's not possible to detect whether a blob content

@@ -61,6 +103,9 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {

         sql.replace(remoteEC.entityName, remoteEntityRow);

+        updateContext.updated[remoteEC.entityName] = updateContext.updated[remoteEC.entityName] || [];
+        updateContext.updated[remoteEC.entityName].push(remoteEC.entityId);
+
         if (!localEC || localEC.utcDateChanged < remoteEC.utcDateChanged) {
             entityChangesService.putEntityChangeWithInstanceId(remoteEC, instanceId);
         }

@@ -77,6 +122,10 @@ function updateNormalEntity(remoteEC, remoteEntityRow, instanceId) {
 }

 function updateNoteReordering(remoteEC, remoteEntityRow, instanceId) {
+    if (!remoteEntityRow) {
+        throw new Error(`Empty note_reordering body for: ${JSON.stringify(remoteEC)}`);
+    }
+
     for (const key in remoteEntityRow) {
         sql.execute("UPDATE branches SET notePosition = ? WHERE branchId = ?", [remoteEntityRow[key], key]);
     }

@@ -110,6 +159,15 @@ function eraseEntity(entityChange, instanceId) {
     entityChangesService.putEntityChangeWithInstanceId(entityChange, instanceId);
 }

+function logUpdateContext(updateContext) {
+    const message = JSON.stringify(updateContext)
+        .replaceAll('"', '')
+        .replaceAll(":", ": ")
+        .replaceAll(",", ", ");
+
+    log.info(message.substr(1, message.length - 2));
+}
+
 module.exports = {
-    updateEntity
+    updateEntities
 };

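For a sense of what logUpdateContext emits (illustrative values, not from the diff): the context object is stringified, quotes are stripped, spacing is added after ':' and ',', and substr() drops the outer braces.

// given   {updated: {notes: ["abc"], branches: ["xyz"]}, alreadyUpdated: 12, erased: 1, alreadyErased: 0}
// it logs roughly:
//   updated: {notes: [abc], branches: [xyz]}, alreadyUpdated: 12, erased: 1, alreadyErased: 0
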
@@ -25,6 +25,10 @@ function md5(content) {
 }

 function hashedBlobId(content) {
+    if (content === null || content === undefined) {
+        content = "";
+    }
+
     // sha512 is faster than sha256
     const base64Hash = crypto.createHash('sha512').update(content).digest('base64');

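The hunk cuts off mid-function; a minimal self-contained sketch of the same idea, deterministic id material derived from content via a base64-encoded sha512 digest, with the same empty-content normalization (the real function presumably goes on to sanitize and shorten the digest, which is not shown here):

const crypto = require('crypto');

function contentHashSketch(content) {
    if (content === null || content === undefined) {
        content = "";   // as above: missing and empty content hash identically
    }

    // deterministic: identical content always yields the identical digest
    return crypto.createHash('sha512').update(content).digest('base64');
}

console.log(contentHashSketch("hello") === contentHashSketch("hello")); // true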