If your segments are broken, or Mailtrain complains about a missing `20190726150000_shorten_field_column_names.js` migration, run the following in `server/setup/knex/fixes`:

```sh
NODE_ENV=production node fix-20190726150000_shorten_field_column_names.js
```
parent bb237b3da4
commit ae5faadffa
3 changed files with 72 additions and 2 deletions
```diff
@@ -8,6 +8,7 @@ const interoperableErrors = require('../../shared/interoperable-errors');
 const shares = require('./shares');
 const validators = require('../../shared/validators');
 const shortid = require('shortid');
+const slugify = require('slugify');
 const segments = require('./segments');
 const { formatDate, formatBirthday, parseDate, parseBirthday } = require('../../shared/date');
 const { getFieldColumn } = require('../../shared/lists');
```
```diff
@@ -542,7 +543,7 @@ async function createTx(tx, context, listId, entity) {
 
         let columnName;
         if (!fieldType.grouped) {
-            columnName = ('custom_' + '_' + shortid.generate()).toLowerCase().replace(/[^a-z0-9_]/g, '_');
+            columnName = ('custom_' + slugify(entity.name, '_').substring(0, 32) + '_' + shortid.generate()).toLowerCase().replace(/[^a-z0-9_]/g, '_');
         }
 
         const filteredEntity = filterObject(entity, allowedKeysCreate);
```
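The new naming keeps a human-readable fragment but truncates the slugified field name to 32 characters, so the whole identifier (the `custom_` prefix, the fragment, and a shortid suffix) stays comfortably under MySQL's 64-character limit on column names. A minimal sketch of the same expression, with a made-up field name:

```js
const shortid = require('shortid');
const slugify = require('slugify');

// Hypothetical field name; any user-supplied name is handled the same way.
const name = 'Very Long Custom Field Name That Would Overflow The Limit';

// Same expression as in the diff above: slugified name truncated to 32
// chars, plus a random shortid, lower-cased and sanitized to [a-z0-9_].
const columnName = ('custom_' + slugify(name, '_').substring(0, 32) + '_' + shortid.generate())
    .toLowerCase()
    .replace(/[^a-z0-9_]/g, '_');

console.log(columnName);        // e.g. custom_very_long_custom_field_name_that_ppbqwe9q
console.log(columnName.length); // 7 + 32 + 1 + shortid length, at most ~54 chars
```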
```diff
@@ -4,6 +4,6 @@ if (!process.env.NODE_CONFIG_DIR) {
     process.env.NODE_CONFIG_DIR = __dirname + '/../../config';
 }
 
-const config = require('server/setup/knex/config');
+const config = require('../../lib/config');
 
 module.exports = config;
```
New file `server/setup/knex/fixes/fix-20190726150000_shorten_field_column_names.js` (+69):

```js
'use strict';

// Required for its side effect: ../config points NODE_CONFIG_DIR at the
// right directory before the config is loaded.
const config = require('../config');
const knex = require('../../../lib/knex');
const shortid = require('shortid');
const slugify = require('slugify');

async function run() {
    const lists = await knex('lists');
    for (const list of lists) {
        console.log(`Processing list ${list.id}`);
        const fields = await knex('custom_fields').whereNotNull('column').where('list', list.id);

        // Map old column names, and their former (untruncated) prefixes,
        // to the freshly generated short names.
        const fieldsMap = new Map();
        const prefixesMap = new Map();

        for (const field of fields) {
            const oldName = field.column;
            const newName = ('custom_' + slugify(field.name, '_').substring(0, 32) + '_' + shortid.generate()).toLowerCase().replace(/[^a-z0-9_]/g, '_');
            const formerPrefix = ('custom_' + slugify(field.name, '_') + '_').toLowerCase().replace(/[^a-z0-9_]/g, '');

            fieldsMap.set(oldName, newName);
            prefixesMap.set(formerPrefix, newName);

            await knex('custom_fields').where('id', field.id).update('column', newName);

            // Rename the data column and its companion source_ column in the
            // per-list subscription table.
            await knex.schema.table('subscription__' + list.id, table => {
                table.renameColumn(oldName, newName);
                table.renameColumn('source_' + oldName, 'source_' + newName);
            });
        }

        // Walk a segment's rule tree: composite nodes (all/some/none) recurse,
        // leaf nodes get their column rewritten, first by exact old-name match,
        // then by matching the former untruncated prefix.
        function processRule(rule) {
            if (rule.type === 'all' || rule.type === 'some' || rule.type === 'none') {
                for (const childRule of rule.rules) {
                    processRule(childRule);
                }
            } else {
                let newName = fieldsMap.get(rule.column);
                if (newName) {
                    rule.column = newName;
                    return;
                }

                for (const [formerPrefix, newName] of prefixesMap.entries()) {
                    if (rule.column.startsWith(formerPrefix)) {
                        rule.column = newName;
                        return;
                    }
                }
            }
        }

        const segments = await knex('segments').where('list', list.id);
        for (const segment of segments) {
            const settings = JSON.parse(segment.settings);
            processRule(settings.rootRule);
            await knex('segments').where('id', segment.id).update({settings: JSON.stringify(settings)});
        }
    }

    // Forget the removed migration so knex no longer complains about it.
    await knex('knex_migrations').where('name', '20190726150000_shorten_field_column_names.js').del();

    console.log('All fixes done');
    process.exit();
}

run().catch(err => console.error(err));
```
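To see what the rule rewrite does to stored segment settings, here is a minimal, self-contained sketch. The `settings` shape (a `rootRule`, composite `all`/`some`/`none` nodes, leaf nodes carrying a `column`) mirrors what the script parses from the `segments` table; the column names, leaf rule types, and values below are hypothetical:

```js
// Hypothetical old-name -> new-name maps, mirroring what the script builds.
const fieldsMap = new Map([['custom_city_a1b2c3d', 'custom_city_x9y8z7w']]);
const prefixesMap = new Map([['custom_membership_level_', 'custom_membership_level_q1w2e3r']]);

// Same traversal as in the script, with the shadowed loop variable renamed.
function processRule(rule) {
    if (rule.type === 'all' || rule.type === 'some' || rule.type === 'none') {
        for (const childRule of rule.rules) {
            processRule(childRule);
        }
    } else {
        const newName = fieldsMap.get(rule.column);
        if (newName) {
            rule.column = newName; // exact old-name match
            return;
        }
        for (const [formerPrefix, prefixNewName] of prefixesMap.entries()) {
            if (rule.column.startsWith(formerPrefix)) {
                rule.column = prefixNewName; // fall back to the untruncated prefix
                return;
            }
        }
    }
}

const settings = {
    rootRule: {
        type: 'all',
        rules: [
            { type: 'eq', column: 'custom_city_a1b2c3d', value: 'Prague' },
            { type: 'eq', column: 'custom_membership_level_abcdefg', value: 'gold' }
        ]
    }
};

processRule(settings.rootRule);
console.log(JSON.stringify(settings, null, 2));
// Both leaf rules now point at the renamed columns:
// custom_city_x9y8z7w and custom_membership_level_q1w2e3r.
```

In the script itself the maps are built per list from `custom_fields`, and the rewritten settings are serialized back with `JSON.stringify` before being written to `segments.settings`.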