
Commit ab1653d5 by Overlook Motel

hooks refactor

1 parent 41812ff7
Showing 252 additions and 186 deletions
@@ -416,7 +416,9 @@ module.exports = (function() {
      fieldsOrOptions = { fields: fieldsOrOptions };
    }

-    options = Utils._.extend({}, options, fieldsOrOptions);
+    options = Utils._.extend({
+      hooks: true
+    }, options, fieldsOrOptions);

    if (!options.fields) {
      options.fields = Object.keys(this.Model.attributes);
@@ -451,8 +453,11 @@ module.exports = (function() {
      options.fields.push(createdAtAttr);
    }

-    return self.hookValidate({
-      skip: _.difference(Object.keys(self.rawAttributes), options.fields)
+    return Promise.try(function() {
+      // Validate
+      if (options.hooks) {
+        return self.hookValidate({skip: _.difference(Object.keys(self.rawAttributes), options.fields)});
+      }
    }).then(function() {
      options.fields.forEach(function(field) {
        if (self.dataValues[field] !== undefined) {
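The pattern introduced here recurs throughout the commit: the optional hook call is wrapped in Promise.try, so the chain resolves straight through when options.hooks is false and synchronous throws from the hook still become rejections. A minimal sketch of the idea, using Bluebird; maybeRunHook and runHook are illustrative names, not part of this diff:

    var Promise = require('bluebird');

    function maybeRunHook(options, runHook) {
      return Promise.try(function() {
        // Only call the hook when hooks are enabled; otherwise this step
        // resolves immediately with undefined.
        if (options.hooks) {
          return runHook();
        }
      }).then(function() {
        // Runs whether or not the hook fired; a hook rejection short-circuits here.
        return 'continue with the save / destroy / update';
      });
    }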
@@ -537,9 +542,11 @@ module.exports = (function() {
      // Add the values to the Instance
      self.dataValues = _.extend(self.dataValues, values);

+      return Promise.try(function() {
+        // Run before hook
+        if (options.hooks) {
      return self.Model.runHooks('before' + hook, self).then(function() {
-        // dataValues might have changed inside the hook, rebuild
-        // the values hash
+        // dataValues might have changed inside the hook, rebuild the values hash
        values = {};
        options.fields.forEach(function(attr) {
@@ -555,8 +562,10 @@ module.exports = (function() {
          });
          args[2] = values;
-      return self.QueryInterface[query].apply(self.QueryInterface, args).catch (function(err) {
+          });
+        }
+      }).then(function() {
+        return self.QueryInterface[query].apply(self.QueryInterface, args).catch(function(err) {
        if (!!self.__options.uniqueKeys && err.code && self.QueryInterface.QueryGenerator.uniqueConstraintMapping.code === err.code) {
          var fields = self.QueryInterface.QueryGenerator.uniqueConstraintMapping.map(err.toString());
@@ -571,15 +580,20 @@ module.exports = (function() {
        }
        throw err;
-      }).then(function(result) {
+      }).tap(function(result) {
        // Transfer database generated values (defaults, autoincrement, etc)
        values = _.extend(values, result.dataValues);
        // Ensure new values are on Instance, and reset previousDataValues
        result.dataValues = _.extend(result.dataValues, values);
        result._previousDataValues = _.clone(result.dataValues);
-        return self.Model.runHooks('after' + hook, result).return (result);
+      }).tap(function(result) {
+        // Run after hook
+        if (options.hooks) {
+          return self.Model.runHooks('after' + hook, result);
+        }
+      }).then(function(result) {
+        return result;
      });
    });
  });
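The hook name in save() is assembled as 'before' + hook / 'after' + hook, so the same chain serves both creates and updates. A minimal sketch of the convention, assuming hook is derived from isNewRecord elsewhere in this file (that code is not part of this diff):

    // Which per-instance hook pair fires around the insert / update query
    var hook = self.isNewRecord ? 'Create' : 'Update';
    var beforeHookName = 'before' + hook;  // 'beforeCreate' or 'beforeUpdate'
    var afterHookName = 'after' + hook;    // 'afterCreate'  or 'afterUpdate'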
@@ -663,24 +677,37 @@ module.exports = (function() {
   * @return {Promise<undefined>}
   */
  Instance.prototype.destroy = function(options) {
-    options = options || {};
-    options.force = options.force === undefined ? false : Boolean(options.force);
+    options = Utils._.extend({
+      hooks: true,
+      force: false
+    }, options || {});

    var self = this;

-    // This semi awkward syntax where we can't return the chain directly but have to return the last .then() call is to allow sql proxying
-    return self.Model.runHooks(self.Model.options.hooks.beforeDestroy, self).then(function() {
+    return Promise.try(function() {
+      // Run before hook
+      if (options.hooks) {
+        return self.Model.runHooks('beforeDestroy', self);
+      }
+    }).then(function() {
      var identifier;

      if (self.Model._timestampAttributes.deletedAt && options.force === false) {
        self.dataValues[self.Model._timestampAttributes.deletedAt] = new Date();
+        options.hooks = false;
        return self.save(options);
      } else {
        identifier = self.__options.hasPrimaryKeys ? self.primaryKeyValues : { id: self.id };
        return self.QueryInterface.delete(self, self.QueryInterface.QueryGenerator.addSchema(self.Model), identifier, options);
      }
-    }).then(function(results) {
-      return self.Model.runHooks(self.Model.options.hooks.afterDestroy, self).return (results);
+    }).tap(function(result) {
+      // Run after hook
+      if (options.hooks) {
+        return self.Model.runHooks('afterDestroy', self);
+      }
+    }).then(function(result) {
+      return result;
    });
  };
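With this hunk, destroy() on an instance defaults to hooks: true, and the paranoid branch sets hooks: false before delegating to save() so the update hooks do not fire on top of the destroy hooks. A rough usage sketch; user and its paranoid User model are placeholders, not part of this commit:

    // user is an instance of a hypothetical paranoid User model
    user.destroy().then(function() {
      // deletedAt has been set; beforeDestroy / afterDestroy have run
    });

    user.destroy({ hooks: false });  // same soft delete, skipping both hooks
    user.destroy({ force: true });   // hard DELETE even though the model is paranoid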
@@ -1109,7 +1109,8 @@ module.exports = (function() {
   * @param {Object}  [options]
   * @param {Array}   [options.fields] Fields to insert (defaults to all fields)
   * @param {Boolean} [options.validate=false] Should each row be subject to validation before it is inserted. The whole insert will fail if one row fails validation
-   * @param {Boolean} [options.hooks=false] Run before / after create hooks for each individual Instance? BulkCreate hooks will still be run.
+   * @param {Boolean} [options.hooks=true] Run before / after bulk create hooks?
+   * @param {Boolean} [options.individualHooks=false] Run before / after create hooks for each individual Instance? BulkCreate hooks will still be run if options.hooks is true.
   * @param {Boolean} [options.ignoreDuplicates=false] Ignore duplicate values for primary keys? (not supported by postgres)
   *
   * @return {Promise<Array<Instance>>}
@@ -1124,7 +1125,8 @@ module.exports = (function() {
    options = Utils._.extend({
      validate: false,
-      hooks: false,
+      hooks: true,
+      individualHooks: false,
      ignoreDuplicates: false
    }, options || {});
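The renamed options are easiest to read at the call site. A rough sketch of a bulkCreate call after this change; the User model and its username field are placeholders:

    User.bulkCreate([
      { username: 'alice' },
      { username: 'bob' }
    ], {
      validate: true,          // validate every row before inserting
      hooks: true,             // beforeBulkCreate / afterBulkCreate (the new default)
      individualHooks: false   // true would also run beforeCreate / afterCreate and save row by row
    }).then(function(daos) {
      // with individualHooks: false, all rows are written in a single bulk insert
    });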
@@ -1140,78 +1142,75 @@ module.exports = (function() {
    }

    var self = this
-      , updatedAtAttr = this._timestampAttributes.updatedAt
      , createdAtAttr = this._timestampAttributes.createdAt
-      , errors = []
-      , daoPromises = []
-      , daos = records.map(function(values) {
-          return self.build(values, {
-            isNewRecord: true
-          });
-        });
-
-    if (options.validate && options.fields.length) {
-      var skippedFields = Utils._.difference(Object.keys(self.attributes), options.fields);
-    }
-
-    var runAfterCreate = function() {
-      return self.runHooks('afterBulkCreate', daos, options.fields).spread(function(newRecords) {
-        return new Promise.resolve(newRecords || daos);
-      });
-    };
-
-    return self.runHooks('beforeBulkCreate', daos, options.fields).spread(function(newRecords, newFields) {
-      daos = newRecords || daos;
-      options.fields = newFields || options.fields;
-
-      var runHook = function(dao) {
-        if (options.hooks === false) {
-          var values = options.fields.length > 0 ? {} : dao.dataValues;
-
-          options.fields.forEach(function(field) {
-            values[field] = dao.dataValues[field];
-          });
-
-          if (createdAtAttr && !values[createdAtAttr]) {
-            values[createdAtAttr] = Utils.now(self.modelManager.sequelize.options.dialect);
-          }
-
-          if (updatedAtAttr && !values[updatedAtAttr]) {
-            values[updatedAtAttr] = Utils.now(self.modelManager.sequelize.options.dialect);
-          }
-
-          records.push(values);
-          return values;
-        }
-
-        return dao.save({ transaction: options.transaction });
-      };
-
-      var runValidation = function(dao) {
-        if (options.validate === false) {
-          return dao;
-        }
-        var fn = options.hooks === true ? 'hookValidate' : 'validate';
-        return dao[fn]({skip: skippedFields}).then(function(err) {
-          if (!!err) {
-            errors.push({record: dao, errors: err});
-          }
-        });
-      };
-
-      records = [];
-      daos.forEach(function(dao) {
-        daoPromises.push(runValidation(dao));
-        daoPromises.push(runHook(dao));
-      });
-
-      return Promise.all(daoPromises).then(function() {
-        if (errors.length) {
-          return Promise.reject(errors);
-        } else if (records.length) {
+      , updatedAtAttr = this._timestampAttributes.updatedAt
+      , now = Utils.now(self.modelManager.sequelize.options.dialect);
+
+    // build DAOs
+    var daos = records.map(function(values) {
+      return self.build(values, {isNewRecord: true});
+    });
+
+    return Promise.try(function() {
+      // Run before hook
+      if (options.hooks) {
+        return self.runHooks('beforeBulkCreate', daos, options.fields).spread(function(_daos, _fields) {
+          daos = _daos || daos;
+          options.fields = _fields || options.fields;
+        });
+      }
+    }).then(function() {
+      daos.forEach(function(dao) {
+        // Filter dataValues by options.fields
+        var values = {};
+        options.fields.forEach(function(field) {
+          values[field] = dao.dataValues[field];
+        });
+
+        // set createdAt/updatedAt attributes
+        if (createdAtAttr && !values[createdAtAttr]) {
+          values[createdAtAttr] = now;
+        }
+        if (updatedAtAttr && !values[updatedAtAttr]) {
+          values[updatedAtAttr] = now;
+        }
+
+        dao.dataValues = values;
+      });
+
+      // Validate
+      if (options.validate) {
+        var skippedFields = Utils._.difference(Object.keys(self.attributes), options.fields);
+
+        var errors = [];
+        return Promise.map(daos, function(dao) {
+          var fn = options.individualHooks ? 'hookValidate' : 'validate';
+          return dao[fn]({skip: skippedFields}).then(function(err) {
+            if (!!err) {
+              errors.push({record: dao, errors: err});
+            }
+          });
+        }).then(function() {
+          if (errors.length) {
+            // Validation or hooks failed
+            return Promise.reject(errors);
+          }
+        });
+      }
+    }).then(function() {
+      if (options.individualHooks) {
+        // Create each dao individually
+        return Promise.map(daos, function(dao) {
+          return dao.save({transaction: options.transaction});
+        }).then(function(_daos) {
+          daos = _daos;
+        });
+      } else {
+        // Create all in one query
+        // Recreate records from daos to represent any changes made in hooks or validation
+        records = daos.map(function(dao) {
+          return dao.dataValues;
+        });
+
        // Map field names
        records.forEach(function(values) {
          for (var attr in values) {
@@ -1234,12 +1233,17 @@ module.exports = (function() {
        }

        // Insert all records at once
-        return self.QueryInterface.bulkInsert(self.getTableName(), records, options, attributes).then(runAfterCreate);
-      } else {
-        // Records were already saved while running create / update hooks
-        return runAfterCreate();
-      }
-      });
+        return self.QueryInterface.bulkInsert(self.getTableName(), records, options, attributes);
+      }
+    }).then(function() {
+      // Run after hook
+      if (options.hooks) {
+        return self.runHooks('afterBulkCreate', daos, options.fields).spread(function(_daos) {
+          if (_daos) daos = _daos;
+        });
+      }
+    }).then(function() {
+      return daos;
    });
  };
@@ -1248,79 +1252,66 @@ module.exports = (function() {
   *
   * @param {Object}  [where] Options to describe the scope of the search.
   * @param {Object}  [options]
-  * @param {Boolean} [options.hooks] If set to true, destroy will find all records within the where parameter and will execute before / after bulkDestroy hooks on each row
+  * @param {Boolean} [options.hooks=true] Run before / after bulk destroy hooks?
+  * @param {Boolean} [options.individualHooks=false] If set to true, destroy will find all records within the where parameter and will execute beforeDestroy / afterDestroy hooks on each row
   * @param {Number}  [options.limit] How many rows to delete
   * @param {Boolean} [options.truncate] If set to true, dialects that support it will use TRUNCATE instead of DELETE FROM. If a table is truncated the where and limit options are ignored
   *
   * @return {Promise<undefined>}
   */
  Model.prototype.destroy = function(where, options) {
-    options = options || {};
-    options.force = options.force === undefined ? false : Boolean(options.force);
+    options = Utils._.extend({
+      hooks: true,
+      individualHooks: false,
+      force: false
+    }, options || {});
    options.type = QueryTypes.BULKDELETE;

    var self = this
-      , query = null
-      , args = [];
-
-    return self.runHooks(self.options.hooks.beforeBulkDestroy, where).then(function(newWhere) {
-      where = newWhere || where;
-
-      if (self._timestampAttributes.deletedAt && options.force === false) {
-        var attrValueHash = {};
-        attrValueHash[self._timestampAttributes.deletedAt] = Utils.now();
-        query = 'bulkUpdate';
-        args = [self.getTableName(), attrValueHash, where, self];
-      } else {
-        query = 'bulkDelete';
-        args = [self.getTableName(), where, options, self];
-      }
-
-      var runQuery = function(records) {
-        return self.QueryInterface[query].apply(self.QueryInterface, args).then(function(results) {
-          if (options && options.hooks === true) {
-            var tick = 0;
-            var next = function(i) {
-              return self.runHooks(self.options.hooks.afterDestroy, records[i]).then(function(newValues) {
-                records[i].dataValues = !!newValues ? newValues.dataValues : records[i].dataValues;
-                tick++;
-
-                if (tick >= records.length) {
-                  return self.runHooks(self.options.hooks.afterBulkDestroy, where).return (results);
-                }
-
-                return next(tick);
-              });
-            };
-
-            return next(tick);
-          } else {
-            return self.runHooks(self.options.hooks.afterBulkDestroy, where).return (results);
-          }
-        });
-      };
-
-      if (options && options.hooks === true) {
-        var tick = 0;
-        return self.all({where: where}).then(function(records) {
-          var next = function(i) {
-            return self.runHooks(self.options.hooks.beforeDestroy, records[i]).then(function(newValues) {
-              records[i].dataValues = !!newValues ? newValues.dataValues : records[i].dataValues;
-              tick++;
-
-              if (tick >= records.length) {
-                return runQuery(records);
-              }
-
-              return next(tick);
-            });
-          };
-
-          return next(tick);
-        });
-      } else {
-        return runQuery();
-      }
+      , daos;
+
+    return Promise.try(function() {
+      // Run before hook
+      if (options.hooks) {
+        return self.runHooks('beforeBulkDestroy', where).spread(function(_where) {
+          where = _where || where;
+        });
+      }
+    }).then(function() {
+      // Get daos and run beforeDestroy hook on each record individually
+      if (options.individualHooks) {
+        return self.all({where: where}, {transaction: options.transaction}).map(function(dao) {
+          return self.runHooks('beforeDestroy', dao).spread(function(_dao) {
+            return _dao || dao;
+          });
+        }).then(function(_daos) {
+          daos = _daos;
+        });
+      }
+    }).then(function() {
+      // Run delete query (or update if paranoid)
+      if (self._timestampAttributes.deletedAt && !options.force) {
+        var attrValueHash = {};
+        attrValueHash[self._timestampAttributes.deletedAt] = Utils.now(self.modelManager.sequelize.options.dialect);
+        return self.QueryInterface.bulkUpdate(self.getTableName(), attrValueHash, where, options, self.rawAttributes);
+      } else {
+        return self.QueryInterface.bulkDelete(self.getTableName(), where, options, self);
+      }
+    }).tap(function() {
+      // Run afterDestroy hook on each record individually
+      if (options.individualHooks) {
+        return Promise.map(daos, function(dao) {
+          return self.runHooks('afterDestroy', dao);
+        });
+      }
+    }).tap(function() {
+      // Run after hook
+      if (options.hooks) {
+        return self.runHooks('afterBulkDestroy', where);
+      }
+    }).then(function(affectedRows) {
+      return affectedRows;
    });
  };
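A rough sketch of how the bulk destroy options combine after this hunk; the User model and where clause are placeholders:

    User.destroy({ lastLogin: null }, {
      hooks: true,            // beforeBulkDestroy / afterBulkDestroy (the default)
      individualHooks: true,  // also fetch each matching row and run beforeDestroy / afterDestroy on it
      force: false            // on a paranoid model this sets deletedAt instead of deleting
    }).then(function(result) {
      // resolves with whatever the underlying delete / update query reported
    });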
@@ -1331,98 +1322,146 @@ module.exports = (function() {
   * @param {Object}  where Options to describe the scope of the search. Note that these options are not wrapped in a { where: ... } as in find / findAll calls etc. This is probably due to change in 2.0
   * @param {Object}  [options]
   * @param {Boolean} [options.validate=true] Should each row be subject to validation before it is inserted. The whole insert will fail if one row fails validation
-  * @param {Boolean} [options.hooks=false] Run before / after bulkUpdate hooks?
+  * @param {Boolean} [options.hooks=true] Run before / after bulk update hooks?
+  * @param {Boolean} [options.individualHooks=false] Run before / after update hooks for each individual row?
   * @param {Number}  [options.limit] How many rows to update (only for mysql and mariadb)
   * @deprecated The syntax is due for change, in order to make `where` more consistent with the rest of the API
   *
   * @return {Promise}
   */
  Model.prototype.update = function(attrValueHash, where, options) {
-    var self = this
-      , tick = 0;
+    var self = this;

-    options = options || {};
-    options.validate = options.validate === undefined ? true : Boolean(options.validate);
-    options.hooks = options.hooks === undefined ? false : Boolean(options.hooks);
+    options = Utils._.extend({
+      validate: true,
+      hooks: true,
+      individualHooks: false,
+      force: false
+    }, options || {});
+
    options.type = QueryTypes.BULKUPDATE;

    if (self._timestampAttributes.updatedAt) {
-      attrValueHash[self._timestampAttributes.updatedAt] = Utils.now();
+      attrValueHash[self._timestampAttributes.updatedAt] = Utils.now(self.modelManager.sequelize.options.dialect);
    }

-    var runSave = function() {
-      return self.runHooks(self.options.hooks.beforeBulkUpdate, attrValueHash, where).spread(function(attributes, _where) {
-        where = _where || where;
-        attrValueHash = attributes || attrValueHash;
-
-        var runQuery = function(records) {
-          return self.QueryInterface.bulkUpdate(self.getTableName(), attrValueHash, where, options, self.rawAttributes).then(function(results) {
-            if (options && options.hooks === true && !!records && records.length > 0) {
-              var tick = 0;
-              var next = function(i) {
-                return self.runHooks(self.options.hooks.afterUpdate, records[i]).then(function(newValues) {
-                  records[i].dataValues = (!!newValues && newValues.dataValues) ? newValues.dataValues : records[i].dataValues;
-                  tick++;
-
-                  if (tick >= records.length) {
-                    return self.runHooks(self.options.hooks.afterBulkUpdate, attrValueHash, where).return (records);
-                  }
-
-                  return next(tick);
-                });
-              };
-
-              return next(tick);
-            } else {
-              return self.runHooks(self.options.hooks.afterBulkUpdate, attrValueHash, where).return (results);
-            }
-          });
-        };
-
-        if (options.hooks === true) {
-          return self.all({where: where}).then(function(records) {
-            if (records === null || records.length < 1) {
-              return runQuery();
-            }
-
-            var next = function(i) {
-              return self.runHooks(self.options.hooks.beforeUpdate, records[i]).then(function(newValues) {
-                records[i].dataValues = (!!newValues && newValues.dataValues) ? newValues.dataValues : records[i].dataValues;
-                tick++;
-
-                if (tick >= records.length) {
-                  return runQuery(records);
-                }
-
-                return next(tick);
-              });
-            };
-
-            return next(tick);
-          });
-        } else {
-          return runQuery();
-        }
-      });
-    };
-
-    if (options.validate === true) {
-      var build = self.build(attrValueHash);
-
-      // We want to skip validations for all other fields
-      var updatedFields = Object.keys(attrValueHash);
-      var skippedFields = Utils._.difference(Object.keys(self.attributes), updatedFields);
-
-      return build.hookValidate({skip: skippedFields}).then(function(attributes) {
-        if (!!attributes && !!attributes.dataValues) {
-          attrValueHash = Utils._.pick.apply(Utils._, [].concat(attributes.dataValues).concat(Object.keys(attrValueHash)));
-        }
-        return runSave();
-      });
-    } else {
-      return runSave();
-    }
+    var daos
+      , attrValueHashUse;
+
+    return Promise.try(function() {
+      // Validate
+      if (options.validate) {
+        var build = self.build(attrValueHash);
+
+        // We want to skip validations for all other fields
+        var skippedFields = Utils._.difference(Object.keys(self.attributes), Object.keys(attrValueHash));
+
+        return build.hookValidate({skip: skippedFields}).then(function(attributes) {
+          if (attributes && attributes.dataValues) {
+            attrValueHash = Utils._.pick.apply(Utils._, [].concat(attributes.dataValues).concat(Object.keys(attrValueHash)));
+          }
+        });
+      }
+    }).then(function() {
+      // Run before hook
+      if (options.hooks) {
+        return self.runHooks('beforeBulkUpdate', attrValueHash, where).spread(function(_attrValueHash, _where) {
+          where = _where || where;
+          attrValueHash = _attrValueHash || attrValueHash;
+        });
+      }
+    }).then(function() {
+      attrValueHashUse = attrValueHash;
+
+      // Get daos and run beforeUpdate hook on each record individually
+      if (options.individualHooks) {
+        return self.all({where: where}, {transaction: options.transaction}).then(function(_daos) {
+          daos = _daos;
+          if (!daos.length) {
+            return [];
+          }
+
+          // Run beforeUpdate hooks on each record and check whether the beforeUpdate hook changes values uniformly
+          // i.e. whether it changes values for each record in the same way
+          var changedValues
+            , different = false;
+
+          return Promise.map(daos, function(dao) {
+            // Record updates in dao's dataValues
+            Utils._.extend(dao.dataValues, attrValueHash);
+
+            // Run beforeUpdate hook
+            return self.runHooks('beforeUpdate', dao).spread(function(_dao) {
+              dao = _dao || dao;
+
+              if (!different) {
+                var thisChangedValues = {};
+                Utils._.forIn(dao.dataValues, function(newValue, attr) {
+                  if (newValue !== dao._previousDataValues[attr]) {
+                    thisChangedValues[attr] = newValue;
+                  }
+                });
+
+                if (!changedValues) {
+                  changedValues = thisChangedValues;
+                } else {
+                  different = !Utils._.isEqual(changedValues, thisChangedValues);
+                }
+              }
+
+              return dao;
+            });
+          }).then(function(_daos) {
+            daos = _daos;
+
+            if (!different) {
+              // Hooks do not change values or change them uniformly
+              if (Object.keys(changedValues).length) {
+                // Hooks change values - record changes in attrValueHashUse so they are executed
+                attrValueHashUse = changedValues;
+              }
+              return;
+            } else {
+              // Hooks change values in a different way for each record
+              // Do not run original query but save each record individually
+              return Promise.map(daos, function(dao) {
+                return dao.save({transaction: options.transaction, hooks: false});
+              }).tap(function(_daos) {
+                daos = _daos;
+              });
+            }
+          });
+        });
+      }
+    }).then(function(results) {
+      if (results) {
+        // Update already done row-by-row - exit
+        return [results.length, results];
+      }
+
+      // Run query to update all rows
+      return self.QueryInterface.bulkUpdate(self.getTableName(), attrValueHashUse, where, options, self.rawAttributes).then(function(affectedRows) {
+        return [affectedRows];
+      });
+    }).tap(function(result) {
+      if (options.individualHooks) {
+        return Promise.map(daos, function(dao) {
+          return self.runHooks('afterUpdate', dao).spread(function(_dao) {
+            return _dao || dao;
+          });
+        }).then(function(_daos) {
+          result[1] = daos = _daos;
+        });
+      }
+    }).tap(function() {
+      // Run after hook
+      if (options.hooks) {
+        return self.runHooks('afterBulkUpdate', attrValueHash, where);
+      }
+    }).then(function(result) {
+      // Return result in form [affectedRows, daos] (daos missed off if options.individualHooks != true)
+      return result;
+    });
  };

  /**
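For reference, a rough sketch of the resulting update call shape; the User model, values and where clause are placeholders:

    User.update(
      { status: 'archived' },       // attrValueHash - values to set
      { lastLogin: null },          // where
      { individualHooks: true }     // run beforeUpdate / afterUpdate on every matched row
    ).spread(function(affectedRows, daos) {
      // daos is only included when individualHooks is true, per the comment above
    });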