feat: move export functions into child processes
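In short: instead of building the CSV/JSON/ZIP inside the HTTP request, the socket handlers now fork a worker script from src/user/jobs/, guard it with a per-user lock in the `locks` hash, and push a notification when the child exits; the controllers only stream the pre-built file from build/export. The sketch below condenses that new flow and is not the verbatim source (see the src/socket.io changes further down); it assumes `db`, `user`, and `notifications` are required as in that file.

    // Condensed sketch of the new export flow (illustration only).
    async function doExport(socket, data, type) {
        // One export of each type per user at a time.
        const count = await db.incrObjectField('locks', 'export:' + data.uid + type);
        if (count > 1) {
            throw new Error('[[error:already-exporting]]');
        }

        // The heavy work runs in a separate Node process (export-profile/posts/uploads.js).
        const child = require('child_process').fork('./src/user/jobs/export-' + type + '.js', [], {
            env: process.env,
        });
        child.send({ uid: data.uid });

        child.on('exit', async function () {
            await db.deleteObjectField('locks', 'export:' + data.uid + type);
            // The job wrote its output into build/export; notify the user with a download link.
            const userData = await user.getUserFields(data.uid, ['username', 'userslug']);
            const n = await notifications.create({
                bodyShort: '[[notifications:' + type + '-exported, ' + userData.username + ']]',
                path: '/api/user/uid/' + userData.userslug + '/export/' + type,
                nid: type + ':export:' + data.uid,
                from: data.uid,
            });
            await notifications.push(n, [socket.uid]);
        });
    }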
app.js (2 lines changed)
@@ -35,7 +35,7 @@ process.env.NODE_ENV = process.env.NODE_ENV || 'production';
global.env = process.env.NODE_ENV || 'production';

// Alternate configuration file support
const configFile = path.resolve(__dirname, nconf.any(['config', 'CONFIG']) || 'config.json');
const configFile = path.resolve(__dirname, nconf.any(['config', 'CONFIG']) || 'config.json');

const configExists = file.existsSync(configFile) || (nconf.get('url') && nconf.get('secret') && nconf.get('database'));
@@ -47,6 +47,9 @@
    "new_register_multiple": "There are <strong>%1</strong> registration requests awaiting review.",
    "flag_assigned_to_you": "<strong>Flag %1</strong> has been assigned to you",
    "post_awaiting_review": "Post awaiting review",
    "profile-exported": "<strong>%1</strong> profile exported, click to download",
    "posts-exported": "<strong>%1</strong> posts exported, click to download",
    "uploads-exported": "<strong>%1</strong> uploads exported, click to download",
    "email-confirmed": "Email Confirmed",
    "email-confirmed-message": "Thank you for validating your email. Your account is now fully activated.",
@@ -200,6 +200,9 @@
    "consent.right_to_data_portability_description": "You may request from us a machine-readable export of any collected data about you and your account. You can do so by clicking the appropriate button below.",
    "consent.export_profile": "Export Profile (.json)",
    "consent.export-profile-success": "Exporting profile, you will get a notification when it is complete.",
    "consent.export_uploads": "Export Uploaded Content (.zip)",
    "consent.export-uploads-success": "Exporting uploads, you will get a notification when it is complete.",
    "consent.export_posts": "Export Posts (.csv)"
    "consent.export_posts": "Export Posts (.csv)",
    "consent.export-posts-success": "Exporting posts, you will get a notification when it is complete."
}
@@ -2941,7 +2941,7 @@ paths:
    get:
      tags:
        - users
      summary: Export a user's profile data (.csv)
      summary: Export a user's profile data (.json)
      parameters:
        - name: userslug
          in: path
@@ -2951,9 +2951,9 @@ paths:
          example: admin
      responses:
        "200":
          description: "A CSV file containing the user profile"
          description: "A JSON file containing the user profile"
          content:
            text/csv:
            text/json:
              schema:
                type: string
                format: binary
@@ -16,6 +16,21 @@ define('forum/account/consent', ['forum/account/header'], function (header) {
            ajaxify.refresh();
        });
    });

    handleExport($('[data-action="export-profile"]'), 'user.exportProfile', '[[user:consent.export-profile-success]]');
    handleExport($('[data-action="export-posts"]'), 'user.exportPosts', '[[user:consent.export-posts-success]]');
    handleExport($('[data-action="export-uploads"]'), 'user.exportUploads', '[[user:consent.export-uploads-success]]');

    function handleExport(el, method, success) {
        el.on('click', function () {
            socket.emit(method, { uid: ajaxify.data.uid }, function (err) {
                if (err) {
                    return app.alertError(err.message);
                }
                app.alertSuccess(success);
            });
        });
    }
};

return Consent;
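For context, the full round trip from the client side looks roughly like this; a sketch assuming the standard NodeBB client globals (`socket`, `app`) that the consent page code above already uses, and assuming `app.user.uid` holds the logged-in user's uid.

    // Request a profile export for the current user (sketch, mirroring handleExport above).
    socket.emit('user.exportProfile', { uid: app.user.uid }, function (err) {
        if (err) {
            return app.alertError(err.message);
        }
        app.alertSuccess('[[user:consent.export-profile-success]]');
    });
    // Once the forked job finishes, the server pushes a notification whose path points at
    // /api/user/uid/<userslug>/export/profile, where the generated .json can be downloaded.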
@@ -1,18 +1,9 @@
'use strict';

const _ = require('lodash');
const path = require('path');
const fs = require('fs');
const winston = require('winston');
const json2csvAsync = require('json2csv').parseAsync;
const archiver = require('archiver');

const db = require('../database');
const user = require('../user');
const meta = require('../meta');
const posts = require('../posts');
const batch = require('../batch');
const events = require('../events');
const privileges = require('../privileges');
const accountHelpers = require('./accounts/helpers');
@@ -85,171 +76,34 @@ userController.getUserDataByUID = async function (callerUid, uid) {
    return userData;
};

userController.exportPosts = async function (req, res) {
    var payload = [];
    await batch.processSortedSet('uid:' + res.locals.uid + ':posts', async function (pids) {
        let postData = await posts.getPostsData(pids);
        // Remove empty post references and convert newlines in content
        postData = postData.filter(Boolean).map(function (post) {
            post.content = '"' + String(post.content || '').replace(/\n/g, '\\n').replace(/"/g, '\\"') + '"';
            return post;
        });
        payload = payload.concat(postData);
    }, {
        batch: 500,
    });

    const fields = payload.length ? Object.keys(payload[0]) : [];
    const opts = { fields };
    const csv = await json2csvAsync(payload, opts);
    res.set('Content-Type', 'text/csv').set('Content-Disposition', 'attachment; filename="' + res.locals.uid + '_posts.csv"').send(csv);
userController.exportPosts = async function (req, res, next) {
    sendExport(res.locals.uid + '_posts.csv', 'text/csv', res, next);
};

userController.exportUploads = function (req, res, next) {
    const targetUid = res.locals.uid;
    const archivePath = path.join(__dirname, '../../build/export', targetUid + '_uploads.zip');
    const archive = archiver('zip', {
        zlib: { level: 9 }, // Sets the compression level.
    });
    const maxAge = 1000 * 60 * 60 * 24; // 1 day
    const rootDirectory = path.join(__dirname, '../../public/uploads/');
    const trimPath = function (path) {
        return path.replace(rootDirectory, '');
    };
    let isFresh = false;
    const sendFile = function () {
        events.log({
            type: 'export:uploads',
            uid: req.uid,
            targetUid: targetUid,
            ip: req.ip,
            fresh: isFresh,
        });
        sendExport(res.locals.uid + '_uploads.zip', 'application/zip', res, next);
    };

    res.sendFile(targetUid + '_uploads.zip', {
        root: path.join(__dirname, '../../build/export'),
        headers: {
            'Content-Disposition': 'attachment; filename=' + targetUid + '_uploads.zip',
            maxAge: maxAge,
        },
    });
};
userController.exportProfile = async function (req, res, next) {
    sendExport(res.locals.uid + '_profile.json', 'application/json', res, next);
};

    // Check for existing file, if exists and is < 1 day in age, send this instead
    try {
        fs.accessSync(archivePath, fs.constants.F_OK | fs.constants.R_OK);
        isFresh = (Date.now() - fs.statSync(archivePath).mtimeMs) < maxAge;
        if (isFresh) {
            return sendFile();
        }
    } catch (err) {
        // File doesn't exist, continue
    }

    const output = fs.createWriteStream(archivePath);
    output.on('close', sendFile);

    archive.on('warning', function (err) {
        switch (err.code) {
            case 'ENOENT':
                winston.warn('[user/export/uploads] File not found: ' + trimPath(err.path));
                break;

            default:
                winston.warn('[user/export/uploads] Unexpected warning: ' + err.message);
                break;
        }
    });

    archive.on('error', function (err) {
        switch (err.code) {
            case 'EACCES':
                winston.error('[user/export/uploads] File inaccessible: ' + trimPath(err.path));
                break;

            default:
                winston.error('[user/export/uploads] Unable to construct archive: ' + err.message);
                break;
        }

        res.sendStatus(500);
    });

    archive.pipe(output);
    winston.verbose('[user/export/uploads] Collating uploads for uid ' + targetUid);
    user.collateUploads(targetUid, archive, function (err) {
function sendExport(filename, type, res, next) {
    res.sendFile(filename, {
        root: path.join(__dirname, '../../build/export'),
        headers: {
            'Content-Type': type,
            'Content-Disposition': 'attachment; filename=' + filename,
        },
    }, function (err) {
        if (err) {
            if (err.code === 'ENOENT') {
                res.locals.isAPI = false;
                return next();
            }
            return next(err);
        }

        archive.finalize();
    });
};

userController.exportProfile = async function (req, res) {
    const targetUid = parseInt(res.locals.uid, 10);
    const [userData, userSettings, ips, sessions, usernames, emails, bookmarks, watchedTopics, upvoted, downvoted, following] = await Promise.all([
        db.getObject('user:' + targetUid),
        db.getObject('user:' + targetUid + ':settings'),
        user.getIPs(targetUid, 9),
        user.auth.getSessions(targetUid, req.sessionID),
        user.getHistory('user:' + targetUid + ':usernames'),
        user.getHistory('user:' + targetUid + ':emails'),
        getSetData('uid:' + targetUid + ':bookmarks', 'post:', targetUid),
        getSetData('uid:' + targetUid + ':followed_tids', 'topic:', targetUid),
        getSetData('uid:' + targetUid + ':upvote', 'post:', targetUid),
        getSetData('uid:' + targetUid + ':downvote', 'post:', targetUid),
        getSetData('following:' + targetUid, 'user:', targetUid),
    ]);
    delete userData.password;
    const followingData = following.map(u => ({ username: u.username, uid: u.uid }));

    let chatData = [];
    await batch.processSortedSet('uid:' + targetUid + ':chat:rooms', async (roomIds) => {
        var result = await Promise.all(roomIds.map(roomId => getRoomMessages(targetUid, roomId)));
        chatData = chatData.concat(_.flatten(result));
    }, { batch: 100 });

    res.set('Content-Type', 'application/json')
        .set('Content-Disposition', 'attachment; filename="' + targetUid + '_profile.json"')
        .send({
            user: userData,
            settings: userSettings,
            ips: ips,
            sessions: sessions,
            usernames: usernames,
            emails: emails,
            messages: chatData,
            bookmarks: bookmarks,
            watchedTopics: watchedTopics,
            upvoted: upvoted,
            downvoted: downvoted,
            following: followingData,
        });
};

async function getRoomMessages(uid, roomId) {
    let data = [];
    await batch.processSortedSet('uid:' + uid + ':chat:room:' + roomId + ':mids', async (mids) => {
        const messageData = await db.getObjects(mids.map(mid => 'message:' + mid));
        data = data.concat(messageData.filter(m => m && m.fromuid === uid && !m.system)
            .map(m => ({ content: m.content, timestamp: m.timestamp }))
        );
    }, { batch: 500 });
    return data;
}

async function getSetData(set, keyPrefix, uid) {
    let data = [];
    await batch.processSortedSet(set, async (ids) => {
        if (keyPrefix === 'post:') {
            ids = await privileges.posts.filter('topics:read', ids, uid);
        } else if (keyPrefix === 'topic:') {
            ids = await privileges.topics.filterTids('topics:read', ids, uid);
        }
        data = data.concat(await db.getObjects(ids.map(id => keyPrefix + id)));
    }, { batch: 500 });
    return data;
}

require('../promisify')(userController, [
@@ -33,7 +33,7 @@ function setupWinston() {
    }

    winston.configure({
        level: nconf.get('log-level') || (global.env === 'production' ? 'info' : 'verbose'),
        level: nconf.get('log-level') || (process.env.NODE_ENV === 'production' ? 'info' : 'verbose'),
        format: winston.format.combine.apply(null, formats),
        transports: [
            new winston.transports.Console({
@@ -71,10 +71,6 @@ function loadConfig(configFile) {
    nconf.set('upload_path', path.resolve(nconf.get('base_dir'), nconf.get('upload_path')));
    nconf.set('upload_url', '/assets/uploads');

    if (nconf.get('url')) {
        nconf.set('url_parsed', url.parse(nconf.get('url')));
    }

    // Explicitly cast 'jobsDisabled' as Bool
    var castAsBool = ['jobsDisabled'];
    nconf.stores.env.readOnly = false;
@@ -87,6 +83,23 @@ function loadConfig(configFile) {
    nconf.stores.env.readOnly = true;

    nconf.set('runJobs', nconf.get('isPrimary') === 'true' && !nconf.get('jobsDisabled'));

    // nconf defaults, if not set in config
    if (!nconf.get('sessionKey')) {
        nconf.set('sessionKey', 'express.sid');
    }

    if (nconf.get('url')) {
        nconf.set('url_parsed', url.parse(nconf.get('url')));
        // Parse out the relative_url and other goodies from the configured URL
        const urlObject = url.parse(nconf.get('url'));
        const relativePath = urlObject.pathname !== '/' ? urlObject.pathname.replace(/\/+$/, '') : '';
        nconf.set('base_url', urlObject.protocol + '//' + urlObject.host);
        nconf.set('secure', urlObject.protocol === 'https:');
        nconf.set('use_port', !!urlObject.port);
        nconf.set('relative_path', relativePath);
        nconf.set('port', nconf.get('PORT') || nconf.get('port') || urlObject.port || (nconf.get('PORT_ENV_VAR') ? nconf.get(nconf.get('PORT_ENV_VAR')) : false) || 4567);
    }
}

function versionCheck() {
@@ -1,9 +1,13 @@
'use strict';

const winston = require('winston');

const user = require('../../user');
const meta = require('../../meta');
const events = require('../../events');
const privileges = require('../../privileges');
const notifications = require('../../notifications');
const db = require('../../database');

module.exports = function (SocketUser) {
    SocketUser.changeUsernameEmail = async function (socket, data) {
@@ -140,4 +144,59 @@ module.exports = function (SocketUser) {
        await user.blocks[isBlocked ? 'remove' : 'add'](data.blockeeUid, data.blockerUid);
        return !isBlocked;
    };

    SocketUser.exportProfile = async function (socket, data) {
        await doExport(socket, data, 'profile');
    };

    SocketUser.exportPosts = async function (socket, data) {
        await doExport(socket, data, 'posts');
    };

    SocketUser.exportUploads = async function (socket, data) {
        await doExport(socket, data, 'uploads');
    };

    async function doExport(socket, data, type) {
        if (!socket.uid) {
            throw new Error('[[error:invalid-uid]]');
        }

        if (!data || !data.uid) {
            throw new Error('[[error:invalid-data]]');
        }

        await user.isAdminOrSelf(socket.uid, data.uid);

        const count = await db.incrObjectField('locks', 'export:' + data.uid + type);
        if (count > 1) {
            throw new Error('[[error:already-exporting]]');
        }

        const child = require('child_process').fork('./src/user/jobs/export-' + type + '.js', [], {
            env: process.env,
        });
        child.send({ uid: data.uid });
        child.on('error', async function (err) {
            winston.error(err.stack);
            await db.deleteObjectField('locks', 'export:' + data.uid + type);
        });
        child.on('exit', async function () {
            await db.deleteObjectField('locks', 'export:' + data.uid + type);
            const userData = await user.getUserFields(data.uid, ['username', 'userslug']);
            const n = await notifications.create({
                bodyShort: '[[notifications:' + type + '-exported, ' + userData.username + ']]',
                path: '/api/user/uid/' + userData.userslug + '/export/' + type,
                nid: type + ':export:' + data.uid,
                from: data.uid,
            });
            await notifications.push(n, [socket.uid]);
            await events.log({
                type: 'export:' + type,
                uid: socket.uid,
                targetUid: data.uid,
                ip: socket.ip,
            });
        });
    }
};
src/start.js (18 lines changed)
@@ -1,7 +1,6 @@
'use strict';

const nconf = require('nconf');
const url = require('url');
const winston = require('winston');

const start = module.exports;
@@ -9,8 +8,6 @@ const start = module.exports;
start.start = async function () {
    const db = require('./database');

    setupConfigs();

    printStartupInfo();

    addProcessHandlers();
@@ -81,21 +78,6 @@ async function runUpgrades() {
    }
}

function setupConfigs() {
    // nconf defaults, if not set in config
    if (!nconf.get('sessionKey')) {
        nconf.set('sessionKey', 'express.sid');
    }
    // Parse out the relative_url and other goodies from the configured URL
    const urlObject = url.parse(nconf.get('url'));
    const relativePath = urlObject.pathname !== '/' ? urlObject.pathname.replace(/\/+$/, '') : '';
    nconf.set('base_url', urlObject.protocol + '//' + urlObject.host);
    nconf.set('secure', urlObject.protocol === 'https:');
    nconf.set('use_port', !!urlObject.port);
    nconf.set('relative_path', relativePath);
    nconf.set('port', nconf.get('PORT') || nconf.get('port') || urlObject.port || (nconf.get('PORT_ENV_VAR') ? nconf.get(nconf.get('PORT_ENV_VAR')) : false) || 4567);
}

function printStartupInfo() {
    if (nconf.get('isPrimary') === 'true') {
        winston.info('Initializing NodeBB v%s %s', nconf.get('version'), nconf.get('url'));
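Because the URL/port/sessionKey parsing now lives in prestart.loadConfig() rather than in start.js, a standalone process can assemble a complete configuration without going through start.start(). That is exactly what the new job scripts below do; a minimal sketch of that bootstrap, assuming the script sits three directory levels below the NodeBB root like src/user/jobs/ does:

    'use strict';

    // Minimal standalone bootstrap, mirroring the job scripts that follow.
    const nconf = require('nconf');
    nconf.argv().env({ separator: '__' });

    const path = require('path');
    const configFile = path.resolve(__dirname, '../../../', nconf.any(['config', 'CONFIG']) || 'config.json');

    const prestart = require('../../prestart');
    prestart.loadConfig(configFile); // now also derives base_url, relative_path, port, sessionKey
    prestart.setupWinston();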
src/user/jobs/export-posts.js (new file, 54 lines)
@@ -0,0 +1,54 @@
'use strict';

const nconf = require('nconf');
nconf.argv().env({
    separator: '__',
});

const fs = require('fs');
const path = require('path');
const json2csvAsync = require('json2csv').parseAsync;

process.env.NODE_ENV = process.env.NODE_ENV || 'production';

// Alternate configuration file support
const configFile = path.resolve(__dirname, '../../../', nconf.any(['config', 'CONFIG']) || 'config.json');
const prestart = require('../../prestart');
prestart.loadConfig(configFile);
prestart.setupWinston();

const db = require('../../database');
const batch = require('../../batch');

process.on('message', async function (msg) {
    if (msg && msg.uid) {
        await db.init();

        const targetUid = msg.uid;
        const filePath = path.join(__dirname, '../../../build/export', targetUid + '_posts.csv');

        const posts = require('../../posts');

        let payload = [];
        await batch.processSortedSet('uid:' + targetUid + ':posts', async function (pids) {
            let postData = await posts.getPostsData(pids);
            // Remove empty post references and convert newlines in content
            postData = postData.filter(Boolean).map(function (post) {
                post.content = '"' + String(post.content || '').replace(/\n/g, '\\n').replace(/"/g, '\\"') + '"';
                return post;
            });
            payload = payload.concat(postData);
        }, {
            batch: 500,
            interval: 1000,
        });

        const fields = payload.length ? Object.keys(payload[0]) : [];
        const opts = { fields };
        const csv = await json2csvAsync(payload, opts);
        await fs.promises.writeFile(filePath, csv);

        await db.close();
        process.exit(0);
    }
});
src/user/jobs/export-profile.js (new file, 108 lines)
@@ -0,0 +1,108 @@
'use strict';

const nconf = require('nconf');
nconf.argv().env({
    separator: '__',
});

const fs = require('fs');
const path = require('path');
const _ = require('lodash');

process.env.NODE_ENV = process.env.NODE_ENV || 'production';

// Alternate configuration file support
const configFile = path.resolve(__dirname, '../../../', nconf.any(['config', 'CONFIG']) || 'config.json');
const prestart = require('../../prestart');
prestart.loadConfig(configFile);
prestart.setupWinston();

const db = require('../../database');
const batch = require('../../batch');

process.on('message', async function (msg) {
    if (msg && msg.uid) {
        await db.init();
        await db.initSessionStore();

        const targetUid = msg.uid;

        const profileFile = targetUid + '_profile.json';
        const profilePath = path.join(__dirname, '../../../build/export', profileFile);

        const user = require('../index');
        const [userData, userSettings, ips, sessions, usernames, emails, bookmarks, watchedTopics, upvoted, downvoted, following] = await Promise.all([
            db.getObject('user:' + targetUid),
            db.getObject('user:' + targetUid + ':settings'),
            user.getIPs(targetUid, 9),
            user.auth.getSessions(targetUid),
            user.getHistory('user:' + targetUid + ':usernames'),
            user.getHistory('user:' + targetUid + ':emails'),
            getSetData('uid:' + targetUid + ':bookmarks', 'post:', targetUid),
            getSetData('uid:' + targetUid + ':followed_tids', 'topic:', targetUid),
            getSetData('uid:' + targetUid + ':upvote', 'post:', targetUid),
            getSetData('uid:' + targetUid + ':downvote', 'post:', targetUid),
            getSetData('following:' + targetUid, 'user:', targetUid),
        ]);
        delete userData.password;

        let chatData = [];
        await batch.processSortedSet('uid:' + targetUid + ':chat:rooms', async (roomIds) => {
            var result = await Promise.all(roomIds.map(roomId => getRoomMessages(targetUid, roomId)));
            chatData = chatData.concat(_.flatten(result));
        }, { batch: 100, interval: 1000 });

        await fs.promises.writeFile(profilePath, JSON.stringify({
            user: userData,
            settings: userSettings,
            ips: ips,
            sessions: sessions,
            usernames: usernames,
            emails: emails,
            messages: chatData,
            bookmarks: bookmarks,
            watchedTopics: watchedTopics,
            upvoted: upvoted,
            downvoted: downvoted,
            following: following,
        }, null, 4));

        await db.close();
        process.exit(0);
    }
});

async function getRoomMessages(uid, roomId) {
    const batch = require('../../batch');
    let data = [];
    await batch.processSortedSet('uid:' + uid + ':chat:room:' + roomId + ':mids', async (mids) => {
        const messageData = await db.getObjects(mids.map(mid => 'message:' + mid));
        data = data.concat(messageData.filter(m => m && m.fromuid === uid && !m.system)
            .map(m => ({ content: m.content, timestamp: m.timestamp }))
        );
    }, { batch: 500, interval: 1000 });
    return data;
}

async function getSetData(set, keyPrefix, uid) {
    const privileges = require('../../privileges');
    const batch = require('../../batch');
    let data = [];
    await batch.processSortedSet(set, async (ids) => {
        if (keyPrefix === 'post:') {
            ids = await privileges.posts.filter('topics:read', ids, uid);
        } else if (keyPrefix === 'topic:') {
            ids = await privileges.topics.filterTids('topics:read', ids, uid);
        }
        let objData = await db.getObjects(ids.map(id => keyPrefix + id));
        if (keyPrefix === 'post:') {
            objData = objData.map(o => _.pick(o, ['pid', 'content', 'timestamp']));
        } else if (keyPrefix === 'topic:') {
            objData = objData.map(o => _.pick(o, ['tid', 'title', 'timestamp']));
        } else if (keyPrefix === 'user:') {
            objData = objData.map(o => _.pick(o, ['uid', 'username']));
        }
        data = data.concat(objData);
    }, { batch: 500, interval: 1000 });
    return data;
}
src/user/jobs/export-uploads.js (new file, 76 lines)
@@ -0,0 +1,76 @@
'use strict';

const nconf = require('nconf');
nconf.argv().env({
    separator: '__',
});

const fs = require('fs');
const path = require('path');
const archiver = require('archiver');
const winston = require('winston');

process.env.NODE_ENV = process.env.NODE_ENV || 'production';

// Alternate configuration file support
const configFile = path.resolve(__dirname, '../../../', nconf.any(['config', 'CONFIG']) || 'config.json');
const prestart = require('../../prestart');
prestart.loadConfig(configFile);
prestart.setupWinston();

const db = require('../../database');

process.on('message', async function (msg) {
    if (msg && msg.uid) {
        await db.init();

        const targetUid = msg.uid;

        const archivePath = path.join(__dirname, '../../../build/export', targetUid + '_uploads.zip');
        const rootDirectory = path.join(__dirname, '../../../public/uploads/');

        const user = require('../index');

        const archive = archiver('zip', {
            zlib: { level: 9 }, // Sets the compression level.
        });

        archive.on('warning', function (err) {
            switch (err.code) {
                case 'ENOENT':
                    winston.warn('[user/export/uploads] File not found: ' + err.path);
                    break;

                default:
                    winston.warn('[user/export/uploads] Unexpected warning: ' + err.message);
                    break;
            }
        });

        archive.on('error', function (err) {
            const trimPath = function (path) {
                return path.replace(rootDirectory, '');
            };
            switch (err.code) {
                case 'EACCES':
                    winston.error('[user/export/uploads] File inaccessible: ' + trimPath(err.path));
                    break;

                default:
                    winston.error('[user/export/uploads] Unable to construct archive: ' + err.message);
                    break;
            }
        });

        const output = fs.createWriteStream(archivePath);
        output.on('close', async function () {
            await db.close();
            process.exit(0);
        });

        archive.pipe(output);
        winston.verbose('[user/export/uploads] Collating uploads for uid ' + targetUid);
        await user.collateUploads(targetUid, archive);
        archive.finalize();
    }
});