UI for basic import and preparation phase of CSV.

Tomas Bures 2018-08-26 11:46:12 +02:00
parent 877e0a857d
commit 739b9452de
24 changed files with 907 additions and 138 deletions


@@ -6,24 +6,43 @@ const { enforce, filterObject } = require('../lib/helpers');
const dtHelpers = require('../lib/dt-helpers');
const interoperableErrors = require('../shared/interoperable-errors');
const shares = require('./shares');
const {ImportType, ImportStatus, RunStatus} = require('../shared/imports');
const {ImportType, ImportStatus, RunStatus, prepFinished, prepFinishedAndNotInProgress, runInProgress} = require('../shared/imports');
const fs = require('fs-extra-promise');
const path = require('path');
const importer = require('../lib/importer');
const filesDir = path.join(__dirname, '..', 'files', 'imports');
const allowedKeys = new Set(['name', 'description', 'type', 'settings']);
const allowedKeysCreate = new Set(['name', 'description', 'type', 'settings', 'mapping']);
const allowedKeysUpdate = new Set(['name', 'description', 'mapping']);
function hash(entity) {
return hasher.hash(filterObject(entity, allowedKeys));
return hasher.hash(filterObject(entity, allowedKeysUpdate));
}
async function getById(context, listId, id) {
async function getById(context, listId, id, withSampleRow = false) {
return await knex.transaction(async tx => {
await shares.enforceEntityPermissionTx(tx, context, 'list', listId, 'viewImports');
const entity = await tx('imports').where({list: listId, id}).first();
if (!entity) {
throw new interoperableErrors.NotFoundError();
}
entity.settings = JSON.parse(entity.settings);
entity.mapping = JSON.parse(entity.mapping);
if (withSampleRow && prepFinished(entity.status)) {
if (entity.type === ImportType.CSV_FILE) {
const importTable = 'import_file__' + id;
const row = await tx(importTable).first();
delete row.id;
entity.sampleRow = row;
}
}
return entity;
});
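
A usage sketch of the new withSampleRow flag (hypothetical caller, not part of this commit): once preparation of a CSV import has finished, getById can return the first row of the staging table so the client can offer column mapping.

// Hypothetical caller illustrating the extended signature of getById.
const entity = await imports.getById(context, listId, importId, true);
if (entity.sampleRow) {
    // sampleRow is the first row of import_file__<id> with the id column stripped,
    // and is only attached once prepFinished(entity.status) is true.
    console.log(Object.keys(entity.sampleRow)); // e.g. the detected CSV columns
}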
@@ -49,22 +68,21 @@ async function _validateAndPreprocess(tx, listId, entity, isCreate) {
enforce(entity.type >= ImportType.MIN && entity.type <= ImportType.MAX, 'Invalid import type');
entity.settings = entity.settings || {};
entity.mapping = entity.mapping || {};
if (entity.type === ImportType.CSV_FILE) {
if (isCreate && entity.type === ImportType.CSV_FILE) {
entity.settings.csv = entity.settings.csv || {};
enforce(entity.settings.csv.delimiter.trim(), 'CSV delimiter must not be empty');
enforce(entity.settings.csv.delimiter && entity.settings.csv.delimiter.trim(), 'CSV delimiter must not be empty');
}
}
async function create(context, listId, entity, files) {
return await knex.transaction(async tx => {
const res = await knex.transaction(async tx => {
shares.enforceGlobalPermission(context, 'setupAutomation');
await shares.enforceEntityPermissionTx(tx, context, 'list', listId, 'manageImports');
await _validateAndPreprocess(tx, listId, entity, true);
// FIXME - set status
if (entity.type === ImportType.CSV_FILE) {
enforce(files.csvFile, 'File must be included');
const csvFile = files.csvFile[0];
@@ -81,18 +99,22 @@ async function create(context, listId, entity, files) {
}
const filteredEntity = filterObject(entity, allowedKeys);
const filteredEntity = filterObject(entity, allowedKeysCreate);
filteredEntity.list = listId;
filteredEntity.settings = JSON.stringify(filteredEntity.settings);
filteredEntity.mapping = JSON.stringify(filteredEntity.mapping);
const ids = await tx('imports').insert(filteredEntity);
const id = ids[0];
return id;
});
importer.scheduleCheck();
return res;
}
async function updateWithConsistencyCheck(context, listId, entity, files) {
async function updateWithConsistencyCheck(context, listId, entity) {
await knex.transaction(async tx => {
shares.enforceGlobalPermission(context, 'setupAutomation');
await shares.enforceEntityPermissionTx(tx, context, 'list', listId, 'manageImports');
@@ -102,25 +124,20 @@ async function updateWithConsistencyCheck(context, listId, entity, files) {
throw new interoperableErrors.NotFoundError();
}
existing.settings = JSON.parse(existing.settings);
existing.mapping = JSON.parse(existing.mapping);
const existingHash = hash(existing);
if (existingHash !== entity.originalHash) {
throw new interoperableErrors.ChangedError();
}
enforce(prepFinishedAndNotInProgress(existing.status), 'Cannot save updates until preparation or run is finished');
enforce(entity.type === existing.type, 'Import type cannot be changed');
await _validateAndPreprocess(tx, listId, entity, false);
if (entity.type === ImportType.CSV_FILE) {
entity.settings.csv = existing.settings.csv;
}
// FIXME - set status
// FIXME - create CSV import table
const filteredEntity = filterObject(entity, allowedKeys);
const filteredEntity = filterObject(entity, allowedKeysUpdate);
filteredEntity.list = listId;
filteredEntity.settings = JSON.stringify(filteredEntity.settings);
filteredEntity.mapping = JSON.stringify(filteredEntity.mapping);
await tx('imports').where({list: listId, id: entity.id}).update(filteredEntity);
});
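
This is the project's optimistic-concurrency pattern: the client sends back originalHash, computed over the same allowedKeysUpdate fields, and the save is rejected with ChangedError when the stored entity has changed in the meantime. A minimal sketch of a caller handling that conflict (hypothetical names, not part of this diff):

// Hypothetical caller: handle a concurrent-edit conflict on save.
try {
    entity.originalHash = loadedHash; // hash of the entity as it was originally fetched
    await imports.updateWithConsistencyCheck(context, listId, entity);
} catch (err) {
    if (err instanceof interoperableErrors.ChangedError) {
        // Someone else edited the import since it was loaded; reload it and let
        // the user merge their changes before saving again.
    } else {
        throw err;
    }
}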
@@ -134,9 +151,15 @@ async function removeTx(tx, context, listId, id) {
throw new interoperableErrors.NotFoundError();
}
// FIXME - remove csv import table
existing.settings = JSON.parse(existing.settings);
await tx('import_failed').whereIn('run', function() {this.from('import_runs').select('id').where('import', id)});
const filePath = path.join(filesDir, existing.settings.csv.filename);
await fs.removeAsync(filePath);
const importTable = 'import_file__' + id;
await knex.schema.dropTableIfExists(importTable);
await tx('import_failed').whereIn('run', function() {this.from('import_runs').select('id').where('import', id)}).del();
await tx('import_runs').where('import', id).del();
await tx('imports').where({list: listId, id}).del();
}
@@ -154,6 +177,60 @@ async function removeAllByListIdTx(tx, context, listId) {
}
}
async function start(context, listId, id) {
await knex.transaction(async tx => {
shares.enforceGlobalPermission(context, 'setupAutomation');
await shares.enforceEntityPermissionTx(tx, context, 'list', listId, 'manageImports');
const entity = await tx('imports').where({list: listId, id}).first();
if (!entity) {
throw new interoperableErrors.NotFoundError();
}
if (!prepFinishedAndNotInProgress(entity.status)) {
throw new interoperableErrors.InvalidStateError('Cannot start until preparation or run is finished');
}
await tx('imports').where({list: listId, id}).update({
status: ImportStatus.RUN_SCHEDULED
});
await tx('import_runs').insert({
import: id,
status: RunStatus.SCHEDULED,
mapping: entity.mapping
});
});
importer.scheduleCheck();
}
async function stop(context, listId, id) {
await knex.transaction(async tx => {
shares.enforceGlobalPermission(context, 'setupAutomation');
await shares.enforceEntityPermissionTx(tx, context, 'list', listId, 'manageImports');
const entity = await tx('imports').where({list: listId, id}).first();
if (!entity) {
throw new interoperableErrors.NotFoundError();
}
if (!runInProgress(entity.status)) {
throw new interoperableErrors.InvalidStateError('No import is currently running');
}
await tx('imports').where({list: listId, id}).update({
status: ImportStatus.RUN_STOPPING
});
await tx('import_runs').where('import', id).whereIn('status', [RunStatus.SCHEDULED, RunStatus.RUNNING]).update({
status: RunStatus.STOPPING
});
});
importer.scheduleCheck();
}
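
Taken together, start() and stop() drive a small status state machine that the background importer polls via importer.scheduleCheck(). A hedged usage sketch (hypothetical caller, not part of this diff):

// Hypothetical API-level usage of the new start/stop entry points.
await imports.start(context, listId, importId); // import -> RUN_SCHEDULED, new run row -> SCHEDULED
// ... later, e.g. from a "Stop" action in the UI:
await imports.stop(context, listId, importId);  // import -> RUN_STOPPING, active runs -> STOPPING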
// This is to handle circular dependency with segments.js
module.exports = {
@@ -164,5 +241,7 @@ module.exports = {
create,
updateWithConsistencyCheck,
remove,
removeAllByListIdTx
removeAllByListIdTx,
start,
stop
};
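
The "circular dependency with segments.js" note above refers to the usual Node.js workaround: populate module.exports before requiring (or lazily require) the module that requires this one back. A generic illustration of the pattern, assuming segments.js requires this model in turn (its side is not shown in this diff):

// a.js — generic sketch of the deferred-require workaround, not the actual Mailtrain files.
module.exports = { doA };

function doA() {
    // Requiring here instead of at the top of the file means b.js is only loaded
    // after a.js has finished populating its exports, so b.js never observes a
    // half-initialized module object.
    const b = require('./b');
    return b.doB();
}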