mirror of
https://github.com/NodeBB/NodeBB.git
synced 2025-11-12 17:05:51 +01:00
Change post owner (#7752)
* feat: #7749, allow array of keys for setObject * feat: sortedSetRemoveBulk * feat: test for bulk remove * feat: #7083, ability to change post ownership * feat: #7083, fix tid:<tid>:posters * feat: #7083, front end * fix: #7752, psql methods * fix: add missing await * fix: maybe psql
This commit is contained in:
committed by
GitHub
parent
5b746d53e7
commit
53e1b349ae
@@ -14,7 +14,14 @@ module.exports = function (db, module) {
|
||||
}
|
||||
|
||||
const writeData = helpers.serializeData(data);
|
||||
await db.collection('objects').updateOne({ _key: key }, { $set: writeData }, { upsert: true, w: 1 });
|
||||
if (Array.isArray(key)) {
|
||||
var bulk = db.collection('objects').initializeUnorderedBulkOp();
|
||||
key.forEach(key => bulk.find({ _key: key }).upsert().updateOne({ $set: writeData }));
|
||||
await bulk.execute();
|
||||
} else {
|
||||
await db.collection('objects').updateOne({ _key: key }, { $set: writeData }, { upsert: true, w: 1 });
|
||||
}
|
||||
|
||||
cache.delObjectCache(key);
|
||||
};
|
||||
|
||||
|
||||
@@ -49,4 +49,13 @@ module.exports = function (db, module) {
|
||||
|
||||
await db.collection('objects').deleteMany(query);
|
||||
};
|
||||
|
||||
// Remove sorted-set members in bulk: each entry of `data` is a
// [key, value] pair identifying one member to delete.
// No-op when `data` is not a non-empty array.
module.sortedSetRemoveBulk = async function (data) {
	if (!Array.isArray(data) || !data.length) {
		return;
	}
	// Collapse all removals into a single unordered bulk op (one round trip).
	// `const` instead of `var`: the handle is never reassigned.
	const bulk = db.collection('objects').initializeUnorderedBulkOp();
	// Member values are stored as strings, so coerce before matching.
	data.forEach(item => bulk.find({ _key: item[0], value: String(item[1]) }).remove());
	await bulk.execute();
};
|
||||
};
|
||||
|
||||
@@ -14,17 +14,24 @@ module.exports = function (db, module) {
|
||||
|
||||
await module.transaction(async function (client) {
|
||||
var query = client.query.bind(client);
|
||||
|
||||
await helpers.ensureLegacyObjectType(client, key, 'hash');
|
||||
await query({
|
||||
name: 'setObject',
|
||||
text: `
|
||||
INSERT INTO "legacy_hash" ("_key", "data")
|
||||
VALUES ($1::TEXT, $2::TEXT::JSONB)
|
||||
ON CONFLICT ("_key")
|
||||
DO UPDATE SET "data" = "legacy_hash"."data" || $2::TEXT::JSONB`,
|
||||
values: [key, JSON.stringify(data)],
|
||||
});
|
||||
const dataString = JSON.stringify(data);
|
||||
async function setOne(key) {
|
||||
await helpers.ensureLegacyObjectType(client, key, 'hash');
|
||||
await query({
|
||||
name: 'setObject',
|
||||
text: `
|
||||
INSERT INTO "legacy_hash" ("_key", "data")
|
||||
VALUES ($1::TEXT, $2::TEXT::JSONB)
|
||||
ON CONFLICT ("_key")
|
||||
DO UPDATE SET "data" = "legacy_hash"."data" || $2::TEXT::JSONB`,
|
||||
values: [key, dataString],
|
||||
});
|
||||
}
|
||||
if (Array.isArray(key)) {
|
||||
await Promise.all(key.map(k => setOne(k)));
|
||||
} else {
|
||||
await setOne(key);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
@@ -35,16 +42,25 @@ DO UPDATE SET "data" = "legacy_hash"."data" || $2::TEXT::JSONB`,
|
||||
|
||||
await module.transaction(async function (client) {
|
||||
var query = client.query.bind(client);
|
||||
await helpers.ensureLegacyObjectType(client, key, 'hash');
|
||||
await query({
|
||||
name: 'setObjectField',
|
||||
text: `
|
||||
INSERT INTO "legacy_hash" ("_key", "data")
|
||||
VALUES ($1::TEXT, jsonb_build_object($2::TEXT, $3::TEXT::JSONB))
|
||||
ON CONFLICT ("_key")
|
||||
DO UPDATE SET "data" = jsonb_set("legacy_hash"."data", ARRAY[$2::TEXT], $3::TEXT::JSONB)`,
|
||||
values: [key, field, JSON.stringify(value)],
|
||||
});
|
||||
const valueString = JSON.stringify(value);
|
||||
async function setOne(key) {
|
||||
await helpers.ensureLegacyObjectType(client, key, 'hash');
|
||||
await query({
|
||||
name: 'setObjectField',
|
||||
text: `
|
||||
INSERT INTO "legacy_hash" ("_key", "data")
|
||||
VALUES ($1::TEXT, jsonb_build_object($2::TEXT, $3::TEXT::JSONB))
|
||||
ON CONFLICT ("_key")
|
||||
DO UPDATE SET "data" = jsonb_set("legacy_hash"."data", ARRAY[$2::TEXT], $3::TEXT::JSONB)`,
|
||||
values: [key, field, valueString],
|
||||
});
|
||||
}
|
||||
|
||||
if (Array.isArray(key)) {
|
||||
await Promise.all(key.map(k => setOne(k)));
|
||||
} else {
|
||||
await setOne(key);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
@@ -69,4 +69,27 @@ DELETE FROM "legacy_zset"
|
||||
values: [keys, min, max],
|
||||
});
|
||||
};
|
||||
|
||||
// Remove sorted-set members in bulk: each entry of `data` is a
// [key, value] pair identifying one member to delete.
// Guard added for parity with the mongo/redis drivers: without it,
// `data.map` throws a TypeError on undefined/non-array input.
module.sortedSetRemoveBulk = async function (data) {
	if (!Array.isArray(data) || !data.length) {
		return;
	}
	// TODO(perf): replace the per-pair queries with a single
	// DELETE ... USING UNNEST($1::TEXT[], $2::TEXT[]) statement.
	await Promise.all(data.map(item => module.sortedSetRemove(item[0], item[1])));
};
|
||||
};
|
||||
|
||||
@@ -27,7 +27,14 @@ module.exports = function (redisClient, module) {
|
||||
if (!Object.keys(data).length) {
|
||||
return;
|
||||
}
|
||||
await redisClient.async.hmset(key, data);
|
||||
if (Array.isArray(key)) {
|
||||
const batch = redisClient.batch();
|
||||
key.forEach(k => batch.hmset(k, data));
|
||||
await helpers.execBatch(batch);
|
||||
} else {
|
||||
await redisClient.async.hmset(key, data);
|
||||
}
|
||||
|
||||
cache.delObjectCache(key);
|
||||
};
|
||||
|
||||
@@ -35,7 +42,14 @@ module.exports = function (redisClient, module) {
|
||||
if (!field) {
|
||||
return;
|
||||
}
|
||||
await redisClient.async.hset(key, field, value);
|
||||
if (Array.isArray(key)) {
|
||||
const batch = redisClient.batch();
|
||||
key.forEach(k => batch.hset(k, field, value));
|
||||
await helpers.execBatch(batch);
|
||||
} else {
|
||||
await redisClient.async.hset(key, field, value);
|
||||
}
|
||||
|
||||
cache.delObjectCache(key);
|
||||
};
|
||||
|
||||
|
||||
@@ -34,4 +34,13 @@ module.exports = function (redisClient, module) {
|
||||
keys.forEach(k => batch.zremrangebyscore(k, min, max));
|
||||
await helpers.execBatch(batch);
|
||||
};
|
||||
|
||||
// Bulk-remove sorted-set members; `data` is an array of [key, value] pairs.
// Does nothing when `data` is missing or empty.
module.sortedSetRemoveBulk = async function (data) {
	if (!Array.isArray(data) || !data.length) {
		return;
	}
	// Queue one ZREM per pair, then fire them all in a single pipelined batch.
	const batch = redisClient.batch();
	for (const [setKey, member] of data) {
		batch.zrem(setKey, member);
	}
	await helpers.execBatch(batch);
};
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user