small sync fixes and refactorings

zadam
2023-07-27 23:22:08 +02:00
parent 527718eff7
commit 8edb5428e5
23 changed files with 561 additions and 340 deletions

View File

@@ -1,6 +1,7 @@
 const becca = require('../becca/becca');
 const NotFoundError = require("../errors/not_found_error");
 const protectedSessionService = require("./protected_session");
+const utils = require("./utils");
 
 function getBlobPojo(entityName, entityId) {
     const entity = becca.getEntity(entityName, entityId);
@@ -45,7 +46,12 @@ function processContent(content, isProtected, isStringContent) {
     }
 }
 
+function calculateContentHash({blobId, content}) {
+    return utils.hash(`${blobId}|${content.toString()}`);
+}
+
 module.exports = {
     getBlobPojo,
-    processContent
+    processContent,
+    calculateContentHash
 };
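The new calculateContentHash helper derives a content-based hash for blobs, which the entity_changes refactoring below uses in place of the old "fake value" placeholder. The diff doesn't show utils.hash itself; a minimal sketch of the intended shape, assuming a SHA-1/base64 digest helper (hypothetical implementation, not code from this commit):

    const crypto = require('crypto');

    // Hypothetical stand-in for utils.hash — the diff only shows that it
    // accepts a single string and returns a digest.
    function hash(text) {
        return crypto.createHash('sha1').update(text).digest('base64');
    }

    // Keying the input on `${blobId}|${content}` ties the hash to both the
    // blob's identity and its bytes, so identical content stored under
    // different blobIds still yields different hashes.
    console.log(hash(`XyZ123abc|Hello, world`));

The content.toString() call in the helper makes it work for both Buffer and string contents.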

View File

@@ -5,6 +5,7 @@ const cls = require('./cls');
 const utils = require('./utils');
 const instanceId = require('./member_id');
 const becca = require("../becca/becca");
+const blobService = require("../services/blob");
 
 let maxEntityChangeId = 0;
@@ -88,52 +89,53 @@ function fillEntityChanges(entityName, entityPrimaryKey, condition = '') {
     cleanupEntityChangesForMissingEntities(entityName, entityPrimaryKey);
 
     sql.transactional(() => {
-        const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName}`
-            + (condition ? ` WHERE ${condition}` : ''));
+        const entityIds = sql.getColumn(`SELECT ${entityPrimaryKey} FROM ${entityName} ${condition}`);
 
         let createdCount = 0;
 
         for (const entityId of entityIds) {
             const existingRows = sql.getValue("SELECT COUNT(1) FROM entity_changes WHERE entityName = ? AND entityId = ?", [entityName, entityId]);
 
-            // we don't want to replace existing entities (which would effectively cause full resync)
-            if (existingRows === 0) {
-                createdCount++;
-
-                let hash;
-                let utcDateChanged;
-                let isSynced;
-
-                if (entityName === 'blobs') {
-                    // FIXME: hacky, not sure if it might cause some problems
-                    hash = "fake value";
-                    utcDateChanged = dateUtils.utcNowDateTime();
-                    isSynced = true; // contents are always synced
-                } else {
-                    const entity = becca.getEntity(entityName, entityId);
-
-                    if (entity) {
-                        hash = entity?.generateHash() || "|deleted";
-                        utcDateChanged = entity?.getUtcDateChanged() || dateUtils.utcNowDateTime();
-                        isSynced = entityName !== 'options' || !!entity?.isSynced;
-                    } else {
-                        // entity might be null (not present in becca) when it's deleted
-                        // FIXME: hacky, not sure if it might cause some problems
-                        hash = "deleted";
-                        utcDateChanged = dateUtils.utcNowDateTime();
-                        isSynced = true; // deletable (the ones with isDeleted) entities are synced
-                    }
-                }
-
-                addEntityChange({
-                    entityName,
-                    entityId,
-                    hash: hash,
-                    isErased: false,
-                    utcDateChanged: utcDateChanged,
-                    isSynced: isSynced
-                });
+            if (existingRows !== 0) {
+                // we don't want to replace existing entities (which would effectively cause full resync)
+                continue;
             }
+
+            createdCount++;
+
+            let hash;
+            let utcDateChanged;
+            let isSynced;
+
+            if (entityName === 'blobs') {
+                const blob = sql.getRow("SELECT blobId, content, utcDateModified FROM blobs WHERE blobId = ?", [entityId]);
+                hash = blobService.calculateContentHash(blob);
+                utcDateChanged = blob.utcDateModified;
+                isSynced = true; // blobs are always synced
+            } else {
+                const entity = becca.getEntity(entityName, entityId);
+
+                if (entity) {
+                    hash = entity?.generateHash() || "|deleted";
+                    utcDateChanged = entity?.getUtcDateChanged() || dateUtils.utcNowDateTime();
+                    isSynced = entityName !== 'options' || !!entity?.isSynced;
+                } else {
+                    // entity might be null (not present in becca) when it's deleted
+                    // FIXME: hacky, not sure if it might cause some problems
+                    hash = "deleted";
+                    utcDateChanged = dateUtils.utcNowDateTime();
+                    isSynced = true; // deletable (the ones with isDeleted) entities are synced
+                }
+            }
+
+            addEntityChange({
+                entityName,
+                entityId,
+                hash: hash,
+                isErased: false,
+                utcDateChanged: utcDateChanged,
+                isSynced: isSynced
+            });
         }
 
         if (createdCount > 0) {
@@ -153,7 +155,7 @@ function fillAllEntityChanges() {
         fillEntityChanges("blobs", "blobId");
         fillEntityChanges("attributes", "attributeId");
         fillEntityChanges("etapi_tokens", "etapiTokenId");
-        fillEntityChanges("options", "name", 'isSynced = 1');
+        fillEntityChanges("options", "name", 'WHERE isSynced = 1');
     });
 }
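With this refactoring the condition argument is the complete, optional WHERE clause: the empty default interpolates as nothing, and callers pass the WHERE keyword themselves. Blobs also now get a real content hash via sql.getRow and blobService.calculateContentHash instead of the "fake value" placeholder. A short illustration of the two call shapes, using names from the hunk above:

    // No condition: `${condition}` defaults to '' and the query is unfiltered.
    fillEntityChanges("blobs", "blobId");

    // With a condition: the caller now includes the WHERE keyword itself,
    // since the query interpolates the argument verbatim.
    fillEntityChanges("options", "name", 'WHERE isSynced = 1');

One consequence worth noting: a caller still passing the old style ('isSynced = 1', without WHERE) would now produce invalid SQL rather than a filtered query.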

View File

@@ -65,7 +65,15 @@ function updateNormalEntity(remoteEntityChange, remoteEntityRow, instanceId) {
         || localEntityChange.hash !== remoteEntityChange.hash // sync error, we should still update
     ) {
         if (remoteEntityChange.entityName === 'blobs') {
-            remoteEntityRow.content = handleContent(remoteEntityRow.content);
+            // we always use a Buffer object, which is different from normal saving where we use a plain string
+            // for "string notes". The problem is that, in general, it's not possible to detect whether a blob's
+            // content belongs to a string note or a binary one (syncs can arrive out of order)
+            remoteEntityRow.content = remoteEntityRow.content === null ? null : Buffer.from(remoteEntityRow.content, 'base64');
+
+            if (remoteEntityRow.content?.byteLength === 0) {
+                // there seems to be a bug which causes an empty buffer to be stored as NULL, which is then picked up as an inconsistency
+                remoteEntityRow.content = "";
+            }
         }
 
         sql.transactional(() => {
@@ -92,20 +100,6 @@ function updateNoteReordering(entityChange, entity, instanceId) {
     return true;
 }
 
-function handleContent(content) {
-    // we always use a Buffer object, which is different from normal saving where we use a plain string
-    // for "string notes". The problem is that, in general, it's not possible to detect whether a blob's
-    // content belongs to a string note or a binary one (syncs can arrive out of order)
-    content = content === null ? null : Buffer.from(content, 'base64');
-
-    if (content && content.byteLength === 0) {
-        // there seems to be a bug which causes an empty buffer to be stored as NULL, which is then picked up as an inconsistency
-        content = "";
-    }
-
-    return content;
-}
-
 function eraseEntity(entityChange, instanceId) {
     const {entityName, entityId} = entityChange;
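The commit inlines the former handleContent helper at its call site; the Buffer/base64 handling itself is unchanged. The empty-content edge case the comment describes can be reproduced with plain Node APIs (a self-contained illustration, not code from this commit):

    // Synced blob content arrives base64-encoded; null means "no content".
    const decoded = Buffer.from("", "base64");
    console.log(decoded.byteLength); // 0 — a zero-length Buffer, not null

    // Per the comment above, a zero-length Buffer may end up persisted as
    // NULL, which the consistency check would flag as a mismatch;
    // normalizing it to "" sidesteps that.
    const content = decoded.byteLength === 0 ? "" : decoded;
    console.log(content === ""); // true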