Skip to content

Commit

Permalink
feat(rules): UNEXPECTED_TRANSLATIONS_DATA rule implementation
Browse files Browse the repository at this point in the history
Closes #247
  • Loading branch information
buchslava committed Nov 17, 2016
1 parent 7623c48 commit 2b46d27
Show file tree
Hide file tree
Showing 59 changed files with 942 additions and 287 deletions.
205 changes: 126 additions & 79 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ const _ = require('lodash');
const DdfDataSet = require('./lib/ddf-definitions/ddf-data-set');
const ddfRules = require('./lib/ddf-rules');
const ddfDataPointRules = require('./lib/ddf-rules/data-point-rules');
const ddfDataPointTransRules = require('./lib/ddf-rules/translation-rules/data-point-rules');
const IssuesFilter = require('./lib/utils/issues-filter');

const CONCURRENT_OPERATIONS_AMOUNT = 30;
Expand All @@ -20,33 +21,103 @@ function walkNonDataPointIssue(context, onIssue) {
});
}

/**
 * Builds an async action that streams one datapoint file and applies every
 * allowed datapoint rule to each record it yields.
 *
 * @param {Object} context - owner exposing `ddfDataSet` and `issuesFilter`
 * @param {Object} dataPointFileDescriptor - descriptor of the datapoint file to load
 * @param {Function} resultHandler - receives the issue array produced by each rule invocation
 * @returns {Function} action taking a completion callback (invoked when the file is consumed)
 */
function createDatapointProcessor(context, dataPointFileDescriptor, resultHandler) {
  const {ddfDataSet} = context;

  // Applied to every record streamed out of the file.
  const handleRecord = (dataPointRecord, line) => {
    const allowedRuleKeys = Object.getOwnPropertySymbols(ddfDataPointRules)
      .filter(ruleKey => context.issuesFilter.isAllowed(ruleKey));

    for (const ruleKey of allowedRuleKeys) {
      const issues = ddfDataPointRules[ruleKey]({ddfDataSet, dataPointFileDescriptor, dataPointRecord, line});

      resultHandler(issues);
    }
  };

  return onDataPointReady =>
    context.ddfDataSet.getDataPoint().loadFile(dataPointFileDescriptor, handleRecord, onDataPointReady);
}

/**
 * Builds an async action that streams one datapoint translation file and runs
 * every allowed translation rule against each record, forwarding each rule's
 * issue array to resultHandler.
 *
 * @param {Object} context - owner exposing `ddfDataSet` and `issuesFilter`
 * @param {Object} dataPointFileTransDescriptor - descriptor of the translation file to load
 * @param {Function} resultHandler - receives the issue array produced by each rule invocation
 * @returns {Function} action taking a completion callback (invoked when the file is consumed)
 */
function createDatapointTranslationProcessor(context, dataPointFileTransDescriptor, resultHandler) {
  const {ddfDataSet} = context;

  // Applied to every translation record streamed out of the file.
  const handleRecord = (transRecord, line) => {
    const allowedRuleKeys = Object.getOwnPropertySymbols(ddfDataPointTransRules)
      .filter(ruleKey => context.issuesFilter.isAllowed(ruleKey));

    for (const ruleKey of allowedRuleKeys) {
      const issues = ddfDataPointTransRules[ruleKey]({ddfDataSet, dataPointFileTransDescriptor, transRecord, line});

      resultHandler(issues);
    }
  };

  return onDataPointReady =>
    context.ddfDataSet.getDataPoint().loadFile(dataPointFileTransDescriptor, handleRecord, onDataPointReady);
}

/**
 * Builds an async action that streams one datapoint translation file and
 * applies a single translation rule — selected by `context.ruleKey` — to each
 * record. Unlike createDatapointTranslationProcessor, the issues filter is not
 * consulted; the caller pins the rule.
 *
 * @param {Object} context - owner exposing `ddfDataSet` and `ruleKey`
 * @param {Object} dataPointFileTransDescriptor - descriptor of the translation file to load
 * @param {Function} resultHandler - receives the issue array produced per record
 * @returns {Function} action taking a completion callback (invoked when the file is consumed)
 */
function createDatapointTranslationByRuleProcessor(context, dataPointFileTransDescriptor, resultHandler) {
  const {ddfDataSet} = context;

  // Look up the rule at record time (not at creation) so a ruleKey assigned
  // to the context after this processor is built is still honored.
  const handleRecord = (transRecord, line) => {
    const rule = ddfDataPointTransRules[context.ruleKey];

    resultHandler(rule({ddfDataSet, dataPointFileTransDescriptor, transRecord, line}));
  };

  return onDataPointReady =>
    context.ddfDataSet.getDataPoint().loadFile(dataPointFileTransDescriptor, handleRecord, onDataPointReady);
}

/**
 * Collects every datapoint validation action for the dataset: one action per
 * datapoint file, plus one action per existing translation descriptor of each
 * datapoint file. `context` must provide prepareDataPointProcessor and
 * prepareDataPointTransProcessor (the validator classes in this file do).
 *
 * @param {Object} context - validator exposing `ddfDataSet` and the two processor factories
 * @returns {Array<Function>} async actions suitable for async.parallelLimit
 */
function getValidationActions(context) {
  const fileDescriptors = context.ddfDataSet.getDataPoint().fileDescriptors;

  // One action per datapoint file.
  const dataPointActions = fileDescriptors
    .map(fileDescriptor => context.prepareDataPointProcessor(fileDescriptor));

  // One action per (datapoint file, existing translation file) pair. Plain
  // nested iteration replaces the previous map/map + _.flattenDeep, which
  // deep-flattened a structure whose nesting depth is known to be exactly two.
  const dataPointTransActions = [];

  fileDescriptors.forEach(fileDescriptor => {
    fileDescriptor.getExistingTranslationDescriptors().forEach(transFileDescriptor => {
      dataPointTransActions.push(context.prepareDataPointTransProcessor(transFileDescriptor, fileDescriptor));
    });
  });

  // Datapoint actions first, translation actions after — same order _.concat produced.
  return dataPointActions.concat(dataPointTransActions);
}

class JSONValidator {
/**
 * @param {string} rootPath - path to the dataset root folder
 * @param {Object} [settings] - optional validator settings; defaults to {}
 */
constructor(rootPath, settings) {
this.rootPath = rootPath;
// Guard against an omitted settings argument.
this.settings = settings || {};
// Emits validation lifecycle events; listeners attach via on().
this.issueEmitter = new EventEmitter();
}

prepareDataPointProcessor(dataPointDetail) {
const ddfDataSet = this.ddfDataSet;

return onDataPointReady => {
this.ddfDataSet.getDataPoint().loadFile(
dataPointDetail,
(dataPointRecord, line) => {
Object.getOwnPropertySymbols(ddfDataPointRules)
.filter(key => this.issuesFilter.isAllowed(key))
.forEach(key => {
const result = ddfDataPointRules[key]({ddfDataSet, dataPointDetail, dataPointRecord, line});

if (!_.isEmpty(result)) {
this.out = this.out.concat(result.map(issue => issue.view()));
}
});
},
err => onDataPointReady(err)
);
};
/**
 * Returns an async action that validates one datapoint file; any issues found
 * are rendered via issue.view() and appended to this.out for the final report.
 *
 * @param {Object} dataPointFileDescriptor - descriptor of the datapoint file to validate
 * @returns {Function} action taking a completion callback
 */
prepareDataPointProcessor(dataPointFileDescriptor) {
return createDatapointProcessor(this, dataPointFileDescriptor, result => {
if (!_.isEmpty(result)) {
this.out = this.out.concat(result.map(issue => issue.view()));
}
});
}

/**
 * Returns an async action that validates one datapoint translation file;
 * issues are rendered via issue.view() and appended to this.out.
 *
 * NOTE: mutates dataPointFileTransDescriptor — copies the parent file's
 * primaryKey onto it so translation rules can key records the same way.
 *
 * @param {Object} dataPointFileTransDescriptor - descriptor of the translation file
 * @param {Object} fileDescriptor - the datapoint file this translation belongs to
 * @returns {Function} action taking a completion callback
 */
prepareDataPointTransProcessor(dataPointFileTransDescriptor, fileDescriptor) {
dataPointFileTransDescriptor.primaryKey = fileDescriptor.primaryKey;

return createDatapointTranslationProcessor(this, dataPointFileTransDescriptor, result => {
if (!_.isEmpty(result)) {
this.out = this.out.concat(result.map(issue => issue.view()));
}
});
}

on(type, data) {
Expand Down Expand Up @@ -77,13 +148,7 @@ class JSONValidator {
return;
}

const dataPointActions = [];

this.ddfDataSet.getDataPoint().fileDescriptors.forEach(detail => {
dataPointActions.push(this.prepareDataPointProcessor(detail));
});

async.parallelLimit(dataPointActions, CONCURRENT_OPERATIONS_AMOUNT, err => {
async.parallelLimit(getValidationActions(this), CONCURRENT_OPERATIONS_AMOUNT, err => {
this.issueEmitter.emit('finish', err, this.out);
});
});
Expand All @@ -98,25 +163,21 @@ class StreamValidator {
}

prepareDataPointProcessor(dataPointDetail) {
const ddfDataSet = this.ddfDataSet;

return onDataPointReady => {
this.ddfDataSet.getDataPoint().loadFile(
dataPointDetail,
(dataPointRecord, line) => {
Object.getOwnPropertySymbols(ddfDataPointRules)
.filter(key => this.issuesFilter.isAllowed(key))
.forEach(key => {
const result = ddfDataPointRules[key]({ddfDataSet, dataPointDetail, dataPointRecord, line});

if (!_.isEmpty(result)) {
result.map(issue => this.issueEmitter.emit('issue', issue.view()));
}
});
},
err => onDataPointReady(err)
);
};
return createDatapointProcessor(this, dataPointDetail, result => {
if (!_.isEmpty(result)) {
result.map(issue => this.issueEmitter.emit('issue', issue.view()));
}
});
}

/**
 * Returns an async action that validates one datapoint translation file;
 * each issue found is emitted immediately as an 'issue' event (streaming
 * behavior) rather than accumulated.
 *
 * NOTE: mutates dataPointFileTransDescriptor — copies the parent file's
 * primaryKey onto it so translation rules can key records the same way.
 *
 * @param {Object} dataPointFileTransDescriptor - descriptor of the translation file
 * @param {Object} fileDescriptor - the datapoint file this translation belongs to
 * @returns {Function} action taking a completion callback
 */
prepareDataPointTransProcessor(dataPointFileTransDescriptor, fileDescriptor) {
dataPointFileTransDescriptor.primaryKey = fileDescriptor.primaryKey;

return createDatapointTranslationProcessor(this, dataPointFileTransDescriptor, result => {
if (!_.isEmpty(result)) {
result.map(issue => this.issueEmitter.emit('issue', issue.view()));
}
});
}

on(type, data) {
Expand Down Expand Up @@ -146,13 +207,7 @@ class StreamValidator {
return;
}

const dataPointActions = [];

this.ddfDataSet.getDataPoint().fileDescriptors.forEach(detail => {
dataPointActions.push(this.prepareDataPointProcessor(detail));
});

async.parallelLimit(dataPointActions, CONCURRENT_OPERATIONS_AMOUNT, err => {
async.parallelLimit(getValidationActions(this), CONCURRENT_OPERATIONS_AMOUNT, err => {
this.issueEmitter.emit('finish', err);
});
});
Expand All @@ -168,30 +223,21 @@ class SimpleValidator {
}

prepareDataPointProcessor(dataPointDetail) {
const ddfDataSet = this.ddfDataSet;

return onDataPointReady => {
if (!this.isDataSetCorrect) {
onDataPointReady();
return;
return createDatapointProcessor(this, dataPointDetail, result => {
if (!_.isEmpty(result)) {
this.isDataSetCorrect = false;
}
});
}

this.ddfDataSet.getDataPoint().loadFile(
dataPointDetail,
(dataPointRecord, line) => {
Object.getOwnPropertySymbols(ddfDataPointRules)
.filter(key => this.issuesFilter.isAllowed(key))
.forEach(key => {
const result = ddfDataPointRules[key]({ddfDataSet, dataPointDetail, dataPointRecord, line});

if (!_.isEmpty(result)) {
this.isDataSetCorrect = false;
}
});
},
err => onDataPointReady(err)
);
};
/**
 * Returns an async action that validates one datapoint translation file;
 * any issue at all simply marks the dataset incorrect (pass/fail semantics —
 * individual issues are not reported).
 *
 * NOTE: mutates dataPointFileTransDescriptor — copies the parent file's
 * primaryKey onto it so translation rules can key records the same way.
 *
 * @param {Object} dataPointFileTransDescriptor - descriptor of the translation file
 * @param {Object} fileDescriptor - the datapoint file this translation belongs to
 * @returns {Function} action taking a completion callback
 */
prepareDataPointTransProcessor(dataPointFileTransDescriptor, fileDescriptor) {
dataPointFileTransDescriptor.primaryKey = fileDescriptor.primaryKey;

return createDatapointTranslationProcessor(this, dataPointFileTransDescriptor, result => {
if (!_.isEmpty(result)) {
this.isDataSetCorrect = false;
}
});
}

on(type, data) {
Expand Down Expand Up @@ -222,9 +268,6 @@ class SimpleValidator {
return;
}

const getDataPointsActions = () => this.ddfDataSet.getDataPoint().fileDescriptors
.map(detail => this.prepareDataPointProcessor(detail));

this.ddfDataSet.load(() => {
validateNonDataPoints();

Expand All @@ -233,13 +276,17 @@ class SimpleValidator {
return;
}

async.parallelLimit(getDataPointsActions(), CONCURRENT_OPERATIONS_AMOUNT, err => {
async.parallelLimit(getValidationActions(this), CONCURRENT_OPERATIONS_AMOUNT, err => {
this.issueEmitter.emit('finish', err, this.isDataSetCorrect);
});
});
}
}


exports.createDatapointProcessor = createDatapointProcessor;
exports.createDatapointTranslationProcessor = createDatapointTranslationProcessor;
exports.createDatapointTranslationByRuleProcessor = createDatapointTranslationByRuleProcessor;
exports.JSONValidator = JSONValidator;
exports.StreamValidator = StreamValidator;
exports.SimpleValidator = SimpleValidator;
Expand Down
2 changes: 1 addition & 1 deletion lib/data/data-package.js
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ class DataPackage {
);
});

async.parallelLimit(headerGetActions, PROCESS_LIMIT, err => onHeadersReady(err));
async.parallelLimit(headerGetActions, PROCESS_LIMIT, onHeadersReady);
}

fillPrimaryKeys() {
Expand Down
7 changes: 6 additions & 1 deletion lib/data/db.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,12 @@ class Db {
this.storage = {};
}

fillCollection(collectionName, csvPath, onCollectionReady) {
/*eslint max-params: ["error", 4]*/
fillCollection(collectionName, csvPath, onCollectionReady, clearBefore) {
if (clearBefore) {
this.storage[collectionName] = [];
}

const collection = this.getCollection(collectionName);
const fileStream = fs.createReadStream(csvPath);

Expand Down
12 changes: 4 additions & 8 deletions lib/data/directory-descriptor.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,12 @@ class DirectoryDescriptor {
}

getTranslations(onTranslationsReady) {
if (_.isEmpty(this.dataPackage.getTranslations()) || !_.isArray(this.dataPackage.getTranslations())) {
if (_.isEmpty(this.dataPackage.getTranslations())) {
onTranslationsReady();
return;
}

const translationsIds = this.dataPackage.getTranslations().map(translation => translation.id);
const translationsIds = _.map(this.dataPackage.getTranslations(), 'id');

this.fileDescriptors.forEach(fileDescriptor => {
const translationFolder = path.resolve(this.dir, constants.TRNSLATIONS_FOLDER);
Expand All @@ -43,9 +43,7 @@ class DirectoryDescriptor {
onTransFileReady =>
fileDescriptor.checkTranslations(onTransFileReady));

async.parallelLimit(transFileActions, PROCESS_LIMIT, err => {
onTranslationsReady(err);
});
async.parallelLimit(transFileActions, PROCESS_LIMIT, onTranslationsReady);
}

check(onDirectoryDescriptorReady) {
Expand Down Expand Up @@ -79,9 +77,7 @@ class DirectoryDescriptor {
return;
}

this.getTranslations(translationErr => {
onDirectoryDescriptorReady(translationErr);
});
this.getTranslations(onDirectoryDescriptorReady);
}
);
});
Expand Down
33 changes: 13 additions & 20 deletions lib/data/file-descriptor.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ class FileDescriptor {
this.fullPath = data.fullPath;
this.csvChecker = new CsvChecker(this.fullPath);
this.hasFirstLine = false;
this.content = [];
}

fillHeaders(onHeadersReady) {
Expand Down Expand Up @@ -82,9 +83,7 @@ class FileDescriptor {
this.issues = _.compact(results);

if (_.isEmpty(this.issues)) {
this.csvChecker.check(() => {
onFileDescriptorChecked();
});
this.csvChecker.check(() => onFileDescriptorChecked());

return;
}
Expand All @@ -95,28 +94,22 @@ class FileDescriptor {
}

checkTranslations(onTranslationsChecked) {
const transFileActions = this.transFileDescriptors
.map(transFileDescriptor => onTransFileReady => {
transFileDescriptor.check(err => {
if (err) {
onTransFileReady();
return;
}

transFileDescriptor.fillHeaders(() => {
onTransFileReady();
});
});
});
const transFileActions = this.transFileDescriptors.map(transFileDescriptor => onTransFileReady => {
transFileDescriptor.check(err => {
if (err) {
onTransFileReady();
return;
}

async.parallelLimit(transFileActions, PROCESS_LIMIT, err => {
onTranslationsChecked(err);
transFileDescriptor.fillHeaders(onTransFileReady);
});
});

async.parallelLimit(transFileActions, PROCESS_LIMIT, onTranslationsChecked);
}

getExistingTranslationDescriptors() {
return this.transFileDescriptors
.filter(transFileDescriptor => _.isEmpty(transFileDescriptor.issues));
return this.transFileDescriptors.filter(transFileDescriptor => _.isEmpty(transFileDescriptor.issues));
}
}

Expand Down
Loading

0 comments on commit 2b46d27

Please sign in to comment.