Fixes in reports (generating a CSV).

Added caching of generated images in mosaico handler.
Various other fixes.
This commit is contained in:
Tomas Bures 2019-04-22 02:41:40 +02:00
parent 055c4c6b51
commit 66702b5edc
39 changed files with 545 additions and 278 deletions

View file

@ -6,6 +6,7 @@ const log = require('./log');
const path = require('path');
const fs = require('fs-extra')
const crypto = require('crypto');
const bluebird = require('bluebird');
let zoneMtaProcess;
@ -155,6 +156,6 @@ function spawn(callback) {
}
}
module.exports.spawn = spawn;
module.exports.spawn = bluebird.promisify(spawn);
module.exports.getUsername = getUsername;
module.exports.getPassword = getPassword;

View file

@ -10,6 +10,7 @@ const log = require('./log');
const fs = require('fs');
const pathlib = require('path');
const Handlebars = require('handlebars');
const bluebird = require('bluebird');
const highestLegacySchemaVersion = 33;
@ -136,8 +137,30 @@ function runInitial(callback) {
}, callback);
}
function runUpdates(callback, runCount) {
runCount = Number(runCount) || 0;
// Executes one schema-update step: loads its SQL (with template data applied),
// runs it on a pooled connection, and reports the outcome via callback.
// Calls back with (err) on failure, or (null, true) on success.
function applyUpdate(update, callback) {
    getSql(update.path, update.data, (sqlErr, sql) => {
        if (sqlErr) {
            return callback(sqlErr);
        }

        db.getConnection((connErr, connection) => {
            if (connErr) {
                return callback(connErr);
            }

            connection.query(sql, queryErr => {
                // Return the connection to the pool whether or not the query failed.
                connection.release();
                return queryErr ? callback(queryErr) : callback(null, true);
            });
        });
    });
}
function runUpdates(runCount, callback) {
listTables((err, tables) => {
if (err) {
return callback(err);
@ -148,7 +171,7 @@ function runUpdates(callback, runCount) {
return callback(new Error('Settings table not found from database'));
}
log.info('sql', 'SQL not set up, initializing');
return runInitial(runUpdates.bind(null, callback, ++runCount));
return runInitial(runUpdates.bind(null, ++runCount, callback));
}
getSchemaVersion((err, schemaVersion) => {
@ -196,37 +219,13 @@ function runUpdates(callback, runCount) {
});
}
function applyUpdate(update, callback) {
getSql(update.path, update.data, (err, sql) => {
if (err) {
return callback(err);
}
const runUpdatesAsync = bluebird.promisify(runUpdates);
const dbEndAsync = bluebird.promisify(db.end.bind(db));
db.getConnection((err, connection) => {
if (err) {
return callback(err);
}
connection.query(sql, err => {
connection.release();
if (err) {
return callback(err);
}
return callback(null, true);
});
});
});
// Module entry point (see `module.exports = dbcheck` below): applies any pending
// schema updates starting at runCount 0, then shuts down the connection pool.
// Rejects if either step fails; otherwise logs completion.
async function dbcheck() {
    await runUpdatesAsync(0);
    await dbEndAsync();  // promisified db.end — releases all pooled connections
    log.info('sql', 'Database check completed');
}
module.exports = callback => {
runUpdates(err => {
if (err) {
return callback(err);
}
db.end(() => {
log.info('sql', 'Database check completed');
return callback(null, true);
});
});
};
module.exports = dbcheck;

View file

@ -3,17 +3,12 @@
const fork = require('child_process').fork;
const log = require('./log');
const path = require('path');
const bluebird = require('bluebird');
const requestCallbacks = {};
let messageTid = 0;
let executorProcess;
module.exports = {
spawn,
start,
stop
};
function spawn(callback) {
log.verbose('Executor', 'Spawning executor process');
@ -81,3 +76,6 @@ function stop(tid) {
});
}
module.exports.spawn = bluebird.promisify(spawn);
module.exports.start = start;
module.exports.stop = stop;

View file

@ -4,15 +4,11 @@ const fork = require('child_process').fork;
const log = require('./log');
const path = require('path');
const senders = require('./senders');
const bluebird = require('bluebird');
let messageTid = 0;
let feedcheckProcess;
module.exports = {
spawn,
scheduleCheck
};
function spawn(callback) {
log.verbose('Feed', 'Spawning feedcheck process');
@ -46,3 +42,5 @@ function scheduleCheck() {
messageTid++;
}
module.exports.spawn = bluebird.promisify(spawn);
module.exports.scheduleCheck = scheduleCheck;

166
server/lib/file-cache.js Normal file
View file

@ -0,0 +1,166 @@
'use strict';
const { filesDir } = require('../models/files');
const path = require('path');
const fs = require('fs-extra-promise');
const stream = require('stream');
const privilegeHelpers = require('./privilege-helpers');
const synchronized = require('./synchronized');
const { tmpName } = require('tmp-promise');
const fileCacheFilesDir = path.join(filesDir, 'cache');
const fileCaches = new Map();
// Builds (or returns an already-built) Express middleware that caches generated
// files on disk under <filesDir>/cache/<typeId> and serves them directly on later
// requests. On a miss, the downstream handler writes its response through
// res.fileCacheResponse, which tees the bytes into a temp file that is moved into
// the cache on completion. The cache is pruned to cacheConfig.maxSize MiB.
// fileNameGen (optional) maps a request to the cache file name; the default is
// the request URL without its leading slash.
// NOTE: callers go through the `synchronized` wrapper below, so two concurrent
// builds for the same typeId cannot race.
async function _fileCache(typeId, cacheConfig, fileNameGen) {
    if (fileCaches.has(typeId)) {
        return fileCaches.get(typeId);
    }

    const localFilesDir = path.join(fileCacheFilesDir, typeId);

    // Start from an empty directory — entries from a previous run are discarded.
    await fs.emptyDirAsync(localFilesDir);
    await privilegeHelpers.ensureMailtrainDir(localFilesDir);

    // fileName -> { fileName, size, order, headers, isReady }
    // An entry with isReady === false marks a file whose generation is in flight.
    const cachedFiles = new Map();

    // Monotonically increasing insertion counter; higher order = more recent.
    let nextFilesOrder = 1;

    // Evicts the oldest ready entries once the cumulative size of the newest
    // entries exceeds the configured budget.
    const pruneCache = async() => {
        const entries = [];
        for (const entry of cachedFiles.values()) {
            if (entry.isReady) {
                entries.push(entry);
            }
        }

        // Newest first; everything past the size budget is deleted.
        entries.sort((x, y) => y.order - x.order);

        let cumulativeSize = 0;
        const maxSize = cacheConfig.maxSize * 1048576; // MiB -> bytes
        for (const entry of entries) {
            cumulativeSize += entry.size;
            if (cumulativeSize > maxSize) {
                // Mark unusable before unlinking so a concurrent request
                // cannot serve a file that is being removed.
                entry.isReady = false;
                await fs.unlinkAsync(path.join(localFilesDir, entry.fileName));
                cachedFiles.delete(entry.fileName);
            }
        }
    };

    const thisFileCache = (req, res, next) => {
        const fileName = fileNameGen ? fileNameGen(req) : req.url.substring(1);
        const localFilePath = path.join(localFilesDir, fileName);

        const fileInfo = cachedFiles.get(fileName);
        if (fileInfo && fileInfo.isReady) {
            // Cache hit: serve the stored file with the headers captured when
            // it was originally generated.
            res.sendFile(
                localFilePath,
                {
                    headers: fileInfo.headers
                },
                err => {
                    if (err) next(err);
                }
            );

        } else {
            // This means that the file is not present. We thus generate it and cache it.
            let fileStream = null;
            let tmpFilePath = null;

            // If the file does not exist yet, we store it. If we receive a simultaneous
            // request while the file is being generated and stored, we only generate it
            // (but do not store it) in the second parallel request.
            const isStoring = !fileInfo;

            if (isStoring) {
                // Placeholder entry signals "generation in progress" to
                // parallel requests for the same file.
                cachedFiles.set(fileName, {
                    fileName,
                    isReady: false
                });
            }

            // Lazily opens the temp-file write stream on first use so that a
            // response with no body never creates a temp file.
            const ensureFileStream = callback => {
                if (!fileStream) {
                    tmpName().then(tmp => {
                        tmpFilePath = tmp;
                        fileStream = fs.createWriteStream(tmpFilePath);
                        callback();
                    })
                } else {
                    callback();
                }
            };

            let fileSize = 0;

            // Writable stream handed to the downstream handler: every chunk is
            // forwarded to the real response and (when storing) teed to the temp file.
            res.fileCacheResponse = new stream.Writable({
                write(chunk, encoding, callback) {
                    res.write(chunk, encoding);

                    if (isStoring) {
                        fileSize += chunk.length;
                        ensureFileStream(() => {
                            fileStream.write(chunk, encoding);
                            callback();
                        });
                    } else {
                        callback();
                    }
                },

                final(callback) {
                    res.end();

                    if (isStoring) {
                        ensureFileStream(() => {
                            fileStream.end(null, null, () => {
                                // Atomically publish the finished temp file, then
                                // flip the entry to ready with the response headers.
                                fs.moveAsync(tmpFilePath, localFilePath, {})
                                    .then(() => {
                                        cachedFiles.set(fileName, {
                                            fileName,
                                            size: fileSize,
                                            order: nextFilesOrder,
                                            headers: res.getHeaders(),
                                            isReady: true
                                        });

                                        nextFilesOrder += 1;

                                        callback();

                                        // Fire-and-forget: pruning failures must not
                                        // affect the response that was already sent.
                                        // noinspection JSIgnoredPromiseFromCall
                                        pruneCache();
                                    })
                                    .catch(err => next(err));
                            });
                        });
                    } else {
                        callback();
                    }
                },

                destroy(err, callback) {
                    res.destroy(err);

                    if (fileStream) {
                        fileStream.destroy(err);

                        // Drop the partial temp file and the in-progress placeholder
                        // so a later request regenerates the file from scratch.
                        fs.unlink(tmpFilePath, () => {
                            cachedFiles.delete(fileName);
                            callback();
                        });
                    } else {
                        callback();
                    }
                }
            });

            next();
        }
    };

    fileCaches.set(typeId, thisFileCache);
    return thisFileCache;
}
const fileCache = synchronized(_fileCache);
module.exports.fileCache = fileCache;
module.exports.fileCacheFilesDir = fileCacheFilesDir;

View file

@ -7,15 +7,11 @@ const path = require('path');
const {ImportStatus, RunStatus} = require('../../shared/imports');
const {ListActivityType} = require('../../shared/activity-log');
const activityLog = require('./activity-log');
const bluebird = require('bluebird');
let messageTid = 0;
let importerProcess;
module.exports = {
spawn,
scheduleCheck
};
function spawn(callback) {
log.verbose('Importer', 'Spawning importer process');
@ -65,4 +61,5 @@ function scheduleCheck() {
messageTid++;
}
module.exports.spawn = bluebird.promisify(spawn);
module.exports.scheduleCheck = scheduleCheck;

View file

@ -53,7 +53,7 @@ function ensureMailtrainOwner(file, callback) {
fs.chown(file, ids.uid, ids.gid, callback);
}
async function ensureMailtrainDir(dir) {
async function ensureMailtrainDir(dir, recursive) {
const ids = getConfigUidGid();
await fs.ensureDir(dir);
await fs.chownAsync(dir, ids.uid, ids.gid);

View file

@ -11,9 +11,10 @@ function nameToFileName(name) {
replace(/--*/g, '-');
}
const reportFilesDir = path.join(__dirname, '..', 'files', 'reports');
// Returns the base path (without extension) for a report's files, e.g.
// "<reportFilesDir>/<id>-<sanitized-name>". Defect fixed: the block contained two
// return statements (the old `protected/reports` path made the new one unreachable);
// only the `reportFilesDir`-based path — matching the `reportFilesDir` constant
// declared and exported by this module — is kept.
function getReportFileBase(report) {
    return path.join(reportFilesDir, report.id + '-' + nameToFileName(report.name));
}
function getReportContentFile(report) {
@ -28,5 +29,6 @@ function getReportOutputFile(report) {
module.exports = {
getReportContentFile,
getReportOutputFile,
nameToFileName
nameToFileName,
reportFilesDir
};

View file

@ -6,6 +6,7 @@ const path = require('path');
const knex = require('./knex');
const {CampaignStatus} = require('../../shared/campaigns');
const builtinZoneMta = require('./builtin-zone-mta');
const bluebird = require('bluebird');
let messageTid = 0;
let senderProcess;
@ -59,9 +60,6 @@ function reloadConfig(sendConfigurationId) {
messageTid++;
}
module.exports = {
spawn,
scheduleCheck,
reloadConfig
};
module.exports.spawn = bluebird.promisify(spawn);
module.exports.scheduleCheck = scheduleCheck;
module.exports.reloadConfig = reloadConfig;

View file

@ -0,0 +1,26 @@
'use strict';
// This implements a simple wrapper around an async function that prevents concurrent execution of the function from two asynchronous chains
// It enforces that the running execution has to complete first before another one is started.
// Wraps an async function so that calls from independent asynchronous chains are
// serialized: a new execution only starts once the in-flight one (if any) has
// settled. The wrapper returns/rejects with the result of its own execution;
// failures of a previous execution are deliberately ignored by waiters.
function synchronized(asyncFn) {
    // Promise of the execution currently in flight; null when idle.
    let inFlight = null;

    return async (...callArgs) => {
        // Wait for any running execution to finish. Re-check after each await:
        // another waiter may have started a new execution in the meantime.
        for (;;) {
            if (!inFlight) {
                break;
            }
            try {
                await inFlight;
            } catch (ignored) {
                // A previous caller's failure is not this caller's concern.
            }
        }

        const execution = asyncFn(...callArgs);
        inFlight = execution;
        try {
            return await execution;
        } finally {
            // Always release the "lock", even when this execution rejected.
            inFlight = null;
        }
    };
}
module.exports = synchronized;