
Commit e8a518e3 by Jochem Maas

merge sequelize@1.7.0-alpha1 - trying to keep the PR up2date

2 parents 9866c5d5 c9f6924b
Showing with 2266 additions and 193 deletions
......@@ -4,4 +4,4 @@ test*.js
.DS_STORE
node_modules
npm-debug.log
*~
*~
\ No newline at end of file
{
"globals": {
"jasmine": false,
"spyOn": false,
"it": false,
"console": false,
"describe": false,
"expect": false,
"beforeEach": false,
"waits": false,
"waitsFor": false,
"runs": false
},
"camelcase": true,
"curly": true,
"forin": true,
"indent": 2,
"unused": true,
"asi": true,
"evil": false,
"laxcomma": true
}
\ No newline at end of file
......@@ -19,5 +19,4 @@ env:
language: node_js
node_js:
- 0.8
- 0.8
\ No newline at end of file
......@@ -2,6 +2,9 @@
The Sequelize library provides easy access to MySQL, SQLite or PostgreSQL databases by mapping database entries to objects and vice versa. In a nutshell, it's an ORM (Object-Relational-Mapper). The library is written entirely in JavaScript and can be used in the Node.js environment.
<a href="http://flattr.com/thing/1259407/Sequelize" target="_blank">
<img src="http://api.flattr.com/button/flattr-badge-large.png" alt="Flattr this" title="Flattr this" border="0" /></a>
## Important Notes ##
### 1.6.0 ###
......@@ -35,7 +38,7 @@ Also make sure to take a look at the examples in the repository. The website wil
- [Documentation](http://www.sequelizejs.com)
- [Twitter](http://twitter.com/sdepold)
- [IRC](irc://irc.freenode.net/sequelizejs)
- [IRC](http://webchat.freenode.net?channels=sequelizejs)
- [Google Groups](https://groups.google.com/forum/#!forum/sequelize)
- [XING](https://www.xing.com/net/priec1b5cx/sequelize) (pretty much inactive, but you might want to mention it on your profile)
......@@ -43,21 +46,18 @@ Also make sure to take a look at the examples in the repository. The website wil
A very basic roadmap. Chances are good that things not mentioned here get implemented as well. Don't panic :)
### 1.6.0 (ToDo)
- ~~Fix last issues with eager loading of associated data~~
- ~~Find out why Person.belongsTo(House) would add person_id to house. It should add house_id to person~~
### 1.7.0
- Check if lodash is a proper alternative to current underscore usage.
- ~~Check if lodash is a proper alternative to current underscore usage.~~
- Transactions
- Support for update of tables without primary key
- MariaDB support
- Support for update and delete calls for whole tables without previous loading of instances
- ~~Support for update and delete calls for whole tables without previous loading of instances~~ Implemented in [#569](https://github.com/sequelize/sequelize/pull/569) thanks to @optiltude
- Eager loading of nested associations [#388](https://github.com/sdepold/sequelize/issues/388#issuecomment-12019099)
- Model#delete
- Validate a model before it gets saved. (Move validation of enum attribute value to validate method)
- BLOB [#99](https://github.com/sdepold/sequelize/issues/99)
- Support for foreign keys
- ~~Validate a model before it gets saved.~~ Implemented in [#601](https://github.com/sequelize/sequelize/pull/601), thanks to @durango
- Move validation of enum attribute value to validate method
- BLOB [#99](https://github.com/sequelize/sequelize/issues/99)
- ~~Support for foreign keys~~ Implemented in [#595](https://github.com/sequelize/sequelize/pull/595), thanks to @optilude
### 1.7.x
- Complete support for non-id primary keys
......@@ -71,6 +71,8 @@ A very basic roadmap. Chances aren't too bad, that not mentioned things are impl
### 2.0.0
- ~~save datetimes in UTC~~
- encapsulate attributes of a dao inside the attributes property + add getters and setters
- add proper error messages everywhere
## Collaboration 2.0 ##
......@@ -234,4 +236,4 @@ for (var key in obj) {
The automated tests we talk about so much run on
[Travis public CI](http://travis-ci.org); here is the build status:
[![Build Status](https://secure.travis-ci.org/sdepold/sequelize.png)](http://travis-ci.org/sdepold/sequelize)
[![Build Status](https://secure.travis-ci.org/sequelize/sequelize.png)](http://travis-ci.org/sequelize/sequelize)
......@@ -8,6 +8,7 @@ const path = require("path")
, _ = Sequelize.Utils._
var configPath = process.cwd() + '/config'
, environment = process.env.NODE_ENV || 'development'
, migrationsPath = process.cwd() + '/migrations'
, packageJsonPath = __dirname + '/../package.json'
, packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString())
......@@ -50,34 +51,52 @@ var createMigrationsFolder = function(force) {
}
var readConfig = function() {
var config
try {
var config = JSON.parse(fs.readFileSync(configFile))
, env = process.env.NODE_ENV || 'development'
config = fs.readFileSync(configFile)
} catch(e) {
throw new Error('Error reading "config/config.json".')
}
if (config[env]) {
config = config[env]
}
try {
config = JSON.parse(config)
} catch (e) {
throw new Error('Error parsing "config/config.json" as JSON.')
}
return config
} catch(e) {
throw new Error('The config.json is not available or contains invalid JSON.')
if (config[environment]) {
config = config[environment]
}
return config
}
program
.version(packageJson.version)
.option('-i, --init', 'Initializes the project. Creates a config/config.json')
.option('-m, --migrate', 'Runs undone migrations')
.option('-i, --init', 'Initializes the project.')
.option('-e, --env <environment>', 'Specify the environment.')
.option('-m, --migrate', 'Run pending migrations.')
.option('-u, --undo', 'Undo the last migration.')
.option('-f, --force', 'Forces the action to be done.')
.option('-c, --create-migration [migration-name]', 'Create a new migration skeleton file.')
.option('-c, --create-migration [migration-name]', 'Creates a new migration.')
.parse(process.argv)
if(typeof program.env === 'string') {
environment = program.env
}
console.log("Using environment '" + environment + "'.")
if(program.migrate) {
if(configFileExists) {
var config = readConfig()
var config
, options = {}
try {
config = readConfig()
} catch(e) {
console.log(e.message)
process.exit(1)
}
_.each(config, function(value, key) {
if(['database', 'username', 'password'].indexOf(key) == -1) {
options[key] = value
......@@ -104,7 +123,8 @@ if(program.migrate) {
sequelize.migrate()
}
} else {
throw new Error('Please add a configuration file under config/config.json. You might run "sequelize --init".')
console.log('Cannot find "config/config.json". Have you run "sequelize --init"?')
process.exit(1)
}
} else if(program.init) {
if(!configFileExists || !!program.force) {
......@@ -129,9 +149,10 @@ if(program.migrate) {
}
})
console.log('Successfully created config.json')
console.log('Created "config/config.json"')
} else {
console.log('A config.json already exists. Run "sequelize --init --force" to overwrite it.')
console.log('"config/config.json" already exists. Run "sequelize --init --force" to overwrite.')
process.exit(1)
}
createMigrationsFolder(program.force)
......@@ -140,21 +161,23 @@ if(program.migrate) {
var migrationName = [
moment().format('YYYYMMDDHHmmss'),
(typeof program.createMigration == 'string') ? program.createMigration : 'unnamed-migration'
(typeof program.createMigration === 'string') ? program.createMigration : 'unnamed-migration'
].join('-') + '.js'
var migrationContent = [
"module.exports = {",
" up: function(migration, DataTypes) {",
" // add altering commands here",
" up: function(migration, DataTypes, done) {",
" // add altering commands here, calling 'done' when finished",
" done()",
" },",
" down: function(migration) {",
" // add reverting commands here",
" down: function(migration, DataTypes, done) {",
" // add reverting commands here, calling 'done' when finished",
" done()",
" }",
"}"
].join('\n')
fs.writeFileSync(migrationsPath + '/' + migrationName, migrationContent)
} else {
console.log('Please define any params!')
console.log('Try "sequelize --help" for usage information.')
}
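// For illustration, a migration written against the generated skeleton might
// look like this (the table and attribute are hypothetical; it assumes the
// migration object proxies query-interface helpers such as createTable/dropTable):
//
//   module.exports = {
//     up: function(migration, DataTypes, done) {
//       migration.createTable('Users', { name: DataTypes.STRING })
//         .success(function() { done() })
//     },
//     down: function(migration, DataTypes, done) {
//       migration.dropTable('Users').success(function() { done() })
//     }
//   }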
# v1.7.0 #
- [DEPENDENCIES] Upgraded validator for IPv6 support. [#603](https://github.com/sequelize/sequelize/pull/603). thanks to durango
- [FEATURE] Validate a model before it gets saved. [#601](https://github.com/sequelize/sequelize/pull/601). thanks to durango
- [DEPENDENCIES] Replaced underscore with lodash. [#594](https://github.com/sequelize/sequelize/pull/594). thanks to durango
- [BUG] Fix string escape with postgresql on raw SQL queries. [#586](https://github.com/sequelize/sequelize/pull/586). thanks to zanamixx
- [BUG] "order by" is now after "group by". [#585](https://github.com/sequelize/sequelize/pull/585). thanks to mekanics
- [BUG] Added decimal support for min/max. [#583](https://github.com/sequelize/sequelize/pull/583). thanks to durango
- [BUG] Null dates don't break SQLite anymore. [#572](https://github.com/sequelize/sequelize/pull/572). thanks to mweibel
- [BUG] Correctly handle booleans in MySQL. [#608](https://github.com/sequelize/sequelize/pull/608). Thanks to terraflubb
- [BUG] Fixed empty where conditions in MySQL. [#619](https://github.com/sequelize/sequelize/pull/619). Thanks to terraflubb
- [FEATURE] Schematics. [#564](https://github.com/sequelize/sequelize/pull/564). thanks to durango
- [FEATURE] Foreign key constraints. [#595](https://github.com/sequelize/sequelize/pull/595). thanks to optilude
- [FEATURE] Support for bulk insert (`<DAOFactory>.bulkCreate()`), update (`<DAOFactory>.update()`) and delete (`<DAOFactory>.destroy()`). [#569](https://github.com/sequelize/sequelize/pull/569). thanks to optilude
- [FEATURE] Add an extra `queryOptions` parameter to `DAOFactory.find` and `DAOFactory.findAll`. This allows a user to specify `{ raw: true }`, meaning that the raw result should be returned instead of built DAOs. Useful for queries returning large datasets, see [#611](https://github.com/sequelize/sequelize/pull/611). thanks to janmeier
- [FEATURE] Added convenient data types. [#616](https://github.com/sequelize/sequelize/pull/616). Thanks to Costent
- [FEATURE] Binary is more verbose now. [#612](https://github.com/sequelize/sequelize/pull/612). Thanks to terraflubb
# v1.6.0 #
- [DEPENDENCIES] upgrade mysql to alpha7. You *MUST* use this version or newer for DATETIMEs to work
- [DEPENDENCIES] upgraded most dependencies. most important: mysql was upgraded to 2.0.0-alpha-3
......
var Utils = require("./../utils")
, DataTypes = require('./../data-types')
, Helpers = require('./helpers')
module.exports = (function() {
var BelongsTo = function(srcDAO, targetDAO, options) {
......@@ -24,6 +25,7 @@ module.exports = (function() {
this.identifier = this.options.foreignKey || Utils._.underscoredIf(Utils.singularize(this.target.tableName) + "Id", this.source.options.underscored)
newAttributes[this.identifier] = { type: DataTypes.INTEGER }
Helpers.addForeignKeyConstraints(newAttributes[this.identifier], this.target, this.source, this.options)
Utils._.defaults(this.source.rawAttributes, newAttributes)
// Sync attributes to DAO proto each time a new assoc is added
......
var Utils = require("./../utils")
, DataTypes = require('./../data-types')
, Helpers = require('./helpers')
var HasManySingleLinked = require("./has-many-single-linked")
, HasManyMultiLinked = require("./has-many-double-linked")
......@@ -65,6 +66,7 @@ module.exports = (function() {
} else {
var newAttributes = {}
newAttributes[this.identifier] = { type: DataTypes.INTEGER }
Helpers.addForeignKeyConstraints(newAttributes[this.identifier], this.source, this.target, this.options)
Utils._.defaults(this.target.rawAttributes, newAttributes)
}
......
var Utils = require("./../utils")
, DataTypes = require('./../data-types')
, Helpers = require("./helpers")
module.exports = (function() {
var HasOne = function(srcDAO, targetDAO, options) {
......@@ -29,6 +30,7 @@ module.exports = (function() {
this.identifier = this.options.foreignKey || Utils._.underscoredIf(Utils.singularize(this.source.tableName) + "Id", this.options.underscored)
newAttributes[this.identifier] = { type: DataTypes.INTEGER }
Helpers.addForeignKeyConstraints(newAttributes[this.identifier], this.source, this.target, this.options)
Utils._.defaults(this.target.rawAttributes, newAttributes)
// Sync attributes to DAO proto each time a new assoc is added
......
var Utils = require("./../utils")
module.exports = {
addForeignKeyConstraints: function(newAttribute, source, target, options) {
// FK constraints are opt-in: users must either set `foreignKeyConstraint`
// on the association, or request an `onDelete` or `onUpdate` behaviour
if(options.foreignKeyConstraint || options.onDelete || options.onUpdate) {
// Find primary keys: composite keys not supported with this approach
var primaryKeys = Utils._.filter(Utils._.keys(source.rawAttributes), function(key) {
return source.rawAttributes[key].primaryKey
})
if(primaryKeys.length == 1) {
newAttribute.references = source.tableName
newAttribute.referencesKey = primaryKeys[0]
newAttribute.onDelete = options.onDelete
newAttribute.onUpdate = options.onUpdate
}
}
}
}
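// Illustrative usage of the opt-in handled by addForeignKeyConstraints above
// (the model names are hypothetical): constraints are only added when the
// association asks for them, e.g.
//
//   Task.belongsTo(User, { foreignKeyConstraint: true })
//   Task.belongsTo(User, { onDelete: 'cascade', onUpdate: 'restrict' })
//
// The generated foreign key column then carries a REFERENCES clause plus the
// requested ON DELETE / ON UPDATE actions (see the REFERENCES handling added
// to the dialect query generators in this change).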
var Toposort = require('toposort-class')
module.exports = (function() {
var DAOFactoryManager = function(sequelize) {
this.daos = []
......@@ -31,5 +33,34 @@ module.exports = (function() {
return this.daos
})
/**
* Iterate over DAOs in an order suitable for e.g. creating tables. Will
* take foreign key constraints into account so that dependencies are visited
* before dependents.
*/
DAOFactoryManager.prototype.forEachDAO = function(iterator) {
var daos = {}
, sorter = new Toposort()
this.daos.forEach(function(dao) {
daos[dao.tableName] = dao
var deps = []
for(var attrName in dao.rawAttributes) {
if(dao.rawAttributes.hasOwnProperty(attrName)) {
if(dao.rawAttributes[attrName].references) {
deps.push(dao.rawAttributes[attrName].references)
}
}
}
sorter.add(dao.tableName, deps)
})
sorter.sort().reverse().forEach(function(name) {
iterator(daos[name])
})
}
return DAOFactoryManager
})()
......@@ -16,7 +16,9 @@ module.exports = (function() {
underscored: false,
syncOnAssociation: true,
paranoid: false,
whereCollection: null
whereCollection: null,
schema: null,
schemaDelimiter: ''
}, options || {})
this.name = name
......@@ -54,11 +56,6 @@ module.exports = (function() {
this.primaryKeys = {};
Utils._.each(this.attributes, function(dataTypeString, attributeName) {
// If you don't specify a valid data type, let's help you debug it
if (dataTypeString === undefined) {
throw new Error("Unrecognized data type for field " + attributeName );
}
if ((attributeName !== 'id') && (dataTypeString.indexOf('PRIMARY KEY') !== -1)) {
self.primaryKeys[attributeName] = dataTypeString
}
......@@ -134,7 +131,7 @@ module.exports = (function() {
return new Utils.CustomEventEmitter(function(emitter) {
var doQuery = function() {
self.QueryInterface
.createTable(self.tableName, self.attributes, options)
.createTable(self.getTableName(), self.attributes, options)
.success(function() { emitter.emit('success', self) })
.error(function(err) { emitter.emit('error', err) })
.on('sql', function(sql) { emitter.emit('sql', sql) })
......@@ -152,18 +149,40 @@ module.exports = (function() {
return this.QueryInterface.dropTable(this.tableName)
}
DAOFactory.prototype.dropSchema = function(schema) {
return this.QueryInterface.dropSchema(schema)
}
DAOFactory.prototype.schema = function(schema, options) {
this.options.schema = schema
if (!!options) {
if (typeof options === "string") {
this.options.schemaDelimiter = options
} else {
if (!!options.schemaDelimiter) {
this.options.schemaDelimiter = options.schemaDelimiter
}
}
}
return this
}
DAOFactory.prototype.getTableName = function() {
return this.QueryGenerator.addSchema(this)
}
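// Illustrative usage of the new schema support (the model name is hypothetical):
//
//   User.schema('admin')        // subsequent queries target the 'admin' schema
//   User.schema('admin', '_')   // a string (or { schemaDelimiter: ... }) sets the delimiter
//
// getTableName() then asks the dialect's QueryGenerator.addSchema() for the
// effective, possibly schema-qualified, table name.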
// alias for findAll
DAOFactory.prototype.all = function(options) {
return this.findAll(options)
DAOFactory.prototype.all = function(options, queryOptions) {
return this.findAll(options, queryOptions)
}
DAOFactory.prototype.findAll = function(options) {
DAOFactory.prototype.findAll = function(options, queryOptions) {
var hasJoin = false
var options = Utils._.clone(options)
if (typeof options === 'object') {
hasJoin = true
if (options.hasOwnProperty('include')) {
hasJoin = true
......@@ -176,10 +195,10 @@ module.exports = (function() {
this.options.whereCollection = options.where || null
}
return this.QueryInterface.select(this, this.tableName, options, {
return this.QueryInterface.select(this, this.tableName, options, Utils._.defaults({
type: 'SELECT',
hasJoin: hasJoin
})
}, queryOptions))
}
//right now, the caller (has-many-double-linked) is in charge of the where clause
......@@ -190,7 +209,7 @@ module.exports = (function() {
// whereCollection is used for non-primary key updates
this.options.whereCollection = optcpy.where || null;
return this.QueryInterface.select(this, [this.tableName, joinTableName], optcpy, { type: 'SELECT' })
return this.QueryInterface.select(this, [this.getTableName(), joinTableName], optcpy, { type: 'SELECT' })
}
/**
......@@ -198,9 +217,10 @@ module.exports = (function() {
*
* @param {Object} options Options to describe the scope of the search.
* @param {Array} include A list of associations which shall get eagerly loaded. Supported formats: { include: [ DaoFactory1, DaoFactory2, ...] } or { include: [ { daoFactory: DaoFactory1, as: 'Alias' } ] }.
* @param {Object} queryOptions Set the query options, e.g. { raw: true } to get raw data back instead of built DAOs.
* @return {Object} A promise which fires `success`, `error`, `complete` and `sql`.
*/
DAOFactory.prototype.find = function(options) {
DAOFactory.prototype.find = function(options, queryOptions) {
var hasJoin = false
// no options defined?
......@@ -252,11 +272,11 @@ module.exports = (function() {
options.limit = 1
return this.QueryInterface.select(this, this.tableName, options, {
return this.QueryInterface.select(this, this.getTableName(), options, Utils._.defaults({
plain: true,
type: 'SELECT',
hasJoin: hasJoin
})
}, queryOptions))
}
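// Illustrative usage of the new queryOptions parameter (the model name is
// hypothetical): request raw result rows instead of built DAOs, which is
// cheaper for queries returning large datasets.
//
//   User.findAll({ where: { admin: true } }, { raw: true })
//     .success(function(rows) {
//       // rows are plain objects rather than DAO instances
//     })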
DAOFactory.prototype.count = function(options) {
......@@ -264,22 +284,22 @@ module.exports = (function() {
options.attributes.push(['count(*)', 'count'])
options.parseInt = true
return this.QueryInterface.rawSelect(this.tableName, options, 'count')
return this.QueryInterface.rawSelect(this.getTableName(), options, 'count')
}
DAOFactory.prototype.max = function(field, options) {
options = Utils._.extend({ attributes: [] }, options || {})
options.attributes.push(['max(' + field + ')', 'max'])
options.parseInt = true
options.parseFloat = true
return this.QueryInterface.rawSelect(this.tableName, options, 'max')
return this.QueryInterface.rawSelect(this.getTableName(), options, 'max')
}
DAOFactory.prototype.min = function(field, options) {
options = Utils._.extend({ attributes: [] }, options || {})
options.attributes.push(['min(' + field + ')', 'min'])
options.parseInt = true
options.parseFloat = true
return this.QueryInterface.rawSelect(this.tableName, options, 'min')
return this.QueryInterface.rawSelect(this.getTableName(), options, 'min')
}
DAOFactory.prototype.build = function(values, options) {
......@@ -327,6 +347,110 @@ module.exports = (function() {
}).run()
}
/**
* Create and insert multiple instances
*
* @param {Array} records List of objects (key/value pairs) to create instances from
* @param {Array} fields Fields to insert (defaults to all fields)
* @return {Object} A promise which fires `success`, `error`, `complete` and `sql`.
*
* Note: the `success` handler is not passed any arguments. To obtain DAOs for
* the newly created values, you will need to query for them again. This is
* because MySQL and SQLite do not make it easy to map automatically generated
* IDs and other default values back to multiple records.
*/
DAOFactory.prototype.bulkCreate = function(records, fields) {
var self = this
, daos = records.map(function(v) { return self.build(v) })
, updatedAtAttr = self.options.underscored ? 'updated_at' : 'updatedAt'
, createdAtAttr = self.options.underscored ? 'created_at' : 'createdAt'
// we will re-create from DAOs, which may have set up default attributes
records = []
if (fields) {
// Always insert updated and created time stamps
if (self.options.timestamps) {
if (fields.indexOf(updatedAtAttr) === -1) {
fields.push(updatedAtAttr)
}
if (fields.indexOf(createdAtAttr) === -1) {
fields.push(createdAtAttr)
}
}
// Build records for the fields we know about
daos.forEach(function(dao) {
var values = {};
fields.forEach(function(field) {
values[field] = dao.values[field]
})
if (self.options.timestamps) {
values[updatedAtAttr] = Utils.now()
}
records.push(values);
})
} else {
daos.forEach(function(dao) {
records.push(dao.values)
})
}
// Validate enums
records.forEach(function(values) {
for (var attrName in self.rawAttributes) {
if (self.rawAttributes.hasOwnProperty(attrName)) {
var definition = self.rawAttributes[attrName]
, isEnum = (definition.type && (definition.type.toString() === DataTypes.ENUM.toString()))
, hasValue = (typeof values[attrName] !== 'undefined')
, valueOutOfScope = ((definition.values || []).indexOf(values[attrName]) === -1)
if (isEnum && hasValue && valueOutOfScope) {
throw new Error('Value "' + values[attrName] + '" for ENUM ' + attrName + ' is out of allowed scope. Allowed values: ' + definition.values.join(', '))
}
}
}
})
return self.QueryInterface.bulkInsert(self.tableName, records)
}
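// Illustrative usage (model and attribute names are hypothetical):
//
//   User.bulkCreate([{ name: 'foo' }, { name: 'bar' }], ['name'])
//     .success(function() {
//       // no DAOs are passed back; query again if the created rows are needed
//       User.findAll().success(function(users) { /* ... */ })
//     })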
/**
* Delete multiple instances
*
* @param {Object} where Options to describe the scope of the search.
* @return {Object} A promise which fires `success`, `error`, `complete` and `sql`.
*/
DAOFactory.prototype.destroy = function(where) {
if (this.options.timestamps && this.options.paranoid) {
var attr = this.options.underscored ? 'deleted_at' : 'deletedAt'
var attrValueHash = {}
attrValueHash[attr] = Utils.now()
return this.QueryInterface.bulkUpdate(this.tableName, attrValueHash, where)
} else {
return this.QueryInterface.bulkDelete(this.tableName, where)
}
}
/**
* Update multiple instances
*
* @param {Object} attrValueHash A hash of fields to change and their new values
* @param {Object} where Options to describe the scope of the search.
* @return {Object} A promise which fires `success`, `error`, `complete` and `sql`.
*/
DAOFactory.prototype.update = function(attrValueHash, where) {
if(this.options.timestamps) {
var attr = this.options.underscored ? 'updated_at' : 'updatedAt'
attrValueHash[attr] = Utils.now()
}
return this.QueryInterface.bulkUpdate(this.tableName, attrValueHash, where)
}
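// Illustrative usage of the bulk update and delete calls (names are hypothetical):
//
//   User.update({ admin: false }, { role: 'guest' })  // set admin to false on all rows where role is 'guest'
//   User.destroy({ role: 'guest' })                   // delete matching rows (soft delete when paranoid)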
// private
var query = function() {
......
......@@ -122,12 +122,27 @@ module.exports = (function() {
if (this.daoFactory.rawAttributes.hasOwnProperty(attrName)) {
var definition = this.daoFactory.rawAttributes[attrName]
, isEnum = (definition.type && (definition.type.toString() === DataTypes.ENUM.toString()))
, isHstore = (!!definition.type && !!definition.type.type && definition.type.type === DataTypes.HSTORE.type)
, hasValue = (typeof values[attrName] !== 'undefined')
, valueOutOfScope = ((definition.values || []).indexOf(values[attrName]) === -1)
if (isEnum && hasValue && valueOutOfScope) {
throw new Error('Value "' + values[attrName] + '" for ENUM ' + attrName + ' is out of allowed scope. Allowed values: ' + definition.values.join(', '))
}
if (isHstore) {
if (typeof values[attrName] === "object") {
var text = []
Utils._.each(values[attrName], function(value, key){
if (typeof value !== "string" && typeof value !== "number") {
throw new Error('Value for HSTORE must be a string or number.')
}
text.push(this.QueryInterface.QueryGenerator.addQuotes(key) + '=>' + (typeof value === "string" ? this.QueryInterface.QueryGenerator.addQuotes(value) : value))
}.bind(this))
values[attrName] = text.join(',')
}
}
}
}
......@@ -135,8 +150,15 @@ module.exports = (function() {
this.dataValues[updatedAtAttr] = values[updatedAtAttr] = Utils.now()
}
if (this.isNewRecord) {
return this.QueryInterface.insert(this, this.__factory.tableName, values)
var errors = this.validate()
if (!!errors) {
return new Utils.CustomEventEmitter(function(emitter) {
emitter.emit('error', errors)
}).run()
}
else if (this.isNewRecord) {
return this.QueryInterface.insert(this, this.QueryInterface.QueryGenerator.addSchema(this.__factory), values)
} else {
var identifier = this.__options.hasPrimaryKeys ? this.primaryKeyValues : this.id;
......@@ -144,7 +166,7 @@ module.exports = (function() {
identifier = this.__options.whereCollection;
}
var tableName = this.__factory.tableName
var tableName = this.QueryInterface.QueryGenerator.addSchema(this.__factory)
, query = this.QueryInterface.update(this, tableName, values, identifier)
return query
......@@ -196,7 +218,9 @@ module.exports = (function() {
Utils._.each(self.values, function(value, field) {
// if field has validators
if (self.validators.hasOwnProperty(field)) {
var hasAllowedNull = (self.rawAttributes[field].allowNull && self.rawAttributes[field].allowNull === true && (value === null || value === undefined));
if (self.validators.hasOwnProperty(field) && !hasAllowedNull) {
// for each validator
Utils._.each(self.validators[field], function(details, validatorType) {
......@@ -281,7 +305,7 @@ module.exports = (function() {
return this.save()
} else {
var identifier = this.__options.hasPrimaryKeys ? this.primaryKeyValues : this.id
return this.QueryInterface.delete(this, this.__factory.tableName, identifier)
return this.QueryInterface.delete(this, this.QueryInterface.QueryGenerator.addSchema(this.__factory.tableName, this.__factory.options.schema), identifier)
}
}
......@@ -301,7 +325,7 @@ module.exports = (function() {
values = fields;
}
return this.QueryInterface.increment(this, this.__factory.tableName, values, identifier)
return this.QueryInterface.increment(this, this.QueryInterface.QueryGenerator.addSchema(this.__factory.tableName, this.__factory.options.schema), values, identifier)
}
DAO.prototype.decrement = function (fields, count) {
......@@ -387,6 +411,17 @@ module.exports = (function() {
attrs = {},
key;
// add all passed values to the dao and store the attribute names in this.attributes
for (key in values) {
if (values.hasOwnProperty(key)) {
if (typeof values[key] === "string" && !!this.__factory && !!this.__factory.rawAttributes[key] && !!this.__factory.rawAttributes[key].type && !!this.__factory.rawAttributes[key].type.type && this.__factory.rawAttributes[key].type.type === DataTypes.HSTORE.type) {
values[key] = this.QueryInterface.QueryGenerator.toHstore(values[key])
}
this.addAttribute(key, values[key])
}
}
if (this.__options.timestamps && isNewRecord) {
defaults[this.__options.underscored ? 'created_at' : 'createdAt'] = Utils.now()
defaults[this.__options.underscored ? 'updated_at' : 'updatedAt'] = Utils.now()
......
var STRING = function(length, binary) {
if (this instanceof STRING) {
this._binary = !!binary;
if (typeof length === 'number') {
this._length = length;
} else {
this._length = 255;
}
} else {
return new STRING(length, binary);
}
};
STRING.prototype = {
get BINARY() {
this._binary = true;
return this;
},
get type() {
return this.toString();
},
toString: function() {
return 'VARCHAR(' + this._length + ')' + ((this._binary) ? ' BINARY' : '');
}
};
Object.defineProperty(STRING, 'BINARY', {
get: function() {
return new STRING(undefined, true);
}
});
var INTEGER = function() {
return INTEGER.prototype.construct.apply(this, [INTEGER].concat(Array.prototype.slice.apply(arguments)));
};
var BIGINT = function() {
return BIGINT.prototype.construct.apply(this, [BIGINT].concat(Array.prototype.slice.apply(arguments)));
};
var FLOAT = function() {
return FLOAT.prototype.construct.apply(this, [FLOAT].concat(Array.prototype.slice.apply(arguments)));
};
FLOAT._type = FLOAT;
FLOAT._typeName = 'FLOAT';
INTEGER._type = INTEGER;
INTEGER._typeName = 'INTEGER';
BIGINT._type = BIGINT;
BIGINT._typeName = 'BIGINT';
STRING._type = STRING;
STRING._typeName = 'VARCHAR';
STRING.toString = INTEGER.toString = FLOAT.toString = BIGINT.toString = function() {
return new this._type().toString();
};
FLOAT.prototype = BIGINT.prototype = INTEGER.prototype = {
construct: function(RealType, length, decimals, unsigned, zerofill) {
if (this instanceof RealType) {
this._typeName = RealType._typeName;
this._unsigned = !!unsigned;
this._zerofill = !!zerofill;
if (typeof length === 'number') {
this._length = length;
}
if (typeof decimals === 'number') {
this._decimals = decimals;
}
} else {
return new RealType(length, decimals, unsigned, zerofill);
}
},
get type() {
return this.toString();
},
get UNSIGNED() {
this._unsigned = true;
return this;
},
get ZEROFILL() {
this._zerofill = true;
return this;
},
toString: function() {
var result = this._typeName;
if (this._length) {
result += '(' + this._length;
if (typeof this._decimals === 'number') {
result += ',' + this._decimals;
}
result += ')';
}
if (this._unsigned) {
result += ' UNSIGNED';
}
if (this._zerofill) {
result += ' ZEROFILL';
}
return result;
}
};
var unsignedDesc = {
get: function() {
return new this._type(undefined, undefined, true);
}
};
var zerofillDesc = {
get: function() {
return new this._type(undefined, undefined, undefined, true);
}
};
var typeDesc = {
get: function() {
return new this._type().toString();
}
};
Object.defineProperty(STRING, 'type', typeDesc);
Object.defineProperty(INTEGER, 'type', typeDesc);
Object.defineProperty(BIGINT, 'type', typeDesc);
Object.defineProperty(FLOAT, 'type', typeDesc);
Object.defineProperty(INTEGER, 'UNSIGNED', unsignedDesc);
Object.defineProperty(BIGINT, 'UNSIGNED', unsignedDesc);
Object.defineProperty(FLOAT, 'UNSIGNED', unsignedDesc);
Object.defineProperty(INTEGER, 'ZEROFILL', zerofillDesc);
Object.defineProperty(BIGINT, 'ZEROFILL', zerofillDesc);
Object.defineProperty(FLOAT, 'ZEROFILL', zerofillDesc);
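// Illustrative usage of the chainable type builders defined above, assuming
// they stay re-exported on the Sequelize constructor as before:
//
//   Sequelize.STRING(100)            // VARCHAR(100)
//   Sequelize.STRING.BINARY          // VARCHAR(255) BINARY
//   Sequelize.INTEGER(11).UNSIGNED   // INTEGER(11) UNSIGNED
//   Sequelize.FLOAT(10, 2).ZEROFILL  // FLOAT(10,2) ZEROFILL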
module.exports = {
STRING: 'VARCHAR(255)',
STRING: STRING,
TEXT: 'TEXT',
INTEGER: 'INTEGER',
BIGINT: 'BIGINT',
INTEGER: INTEGER,
BIGINT: BIGINT,
DATE: 'DATETIME',
BOOLEAN: 'TINYINT(1)',
FLOAT: 'FLOAT',
FLOAT: FLOAT,
NOW: 'NOW',
get ENUM() {
......@@ -32,5 +172,19 @@ module.exports = {
return result
},
ARRAY: function(type) { return type + '[]' }
ARRAY: function(type) { return type + '[]' },
get HSTORE() {
var result = function() {
return {
type: 'HSTORE'
}
}
result.type = 'HSTORE'
result.toString = result.valueOf = function() { return 'TEXT' }
return result
}
}
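// Illustrative usage of the new HSTORE type (PostgreSQL only; names are hypothetical):
//
//   var Preference = sequelize.define('Preference', { data: Sequelize.HSTORE })
//   Preference.create({ data: { theme: 'dark', fontSize: 14 } })
//
// Object values are serialized to hstore text ('"theme"=>"dark",...') on save
// and parsed back into plain objects when rows are read (see toHstore in the
// postgres query generator).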
......@@ -8,6 +8,39 @@ module.exports = (function() {
var QueryGenerator = {
options: {},
addSchema: function(opts) {
var tableName = undefined
var schema = (!!opts.options && !!opts.options.schema ? opts.options.schema : undefined)
var schemaDelimiter = (!!opts.options && !!opts.options.schemaDelimiter ? opts.options.schemaDelimiter : undefined)
if (!!opts.tableName) {
tableName = opts.tableName
}
else if (typeof opts === "string") {
tableName = opts
}
if (!schema || schema.toString().trim() === "") {
return tableName
}
return QueryGenerator.addQuotes(schema) + '.' + QueryGenerator.addQuotes(tableName)
},
createSchema: function(schema) {
var query = "CREATE SCHEMA <%= schema%>;"
return Utils._.template(query)({schema: schema})
},
dropSchema: function(schema) {
var query = "DROP SCHEMA <%= schema%> CASCADE;"
return Utils._.template(query)({schema: schema})
},
showSchemasQuery: function() {
return "SELECT schema_name FROM information_schema.schemata WHERE schema_name <> 'information_schema' AND schema_name != 'public' AND schema_name !~ E'^pg_';"
},
createTableQuery: function(tableName, attributes, options) {
options = Utils._.extend({
}, options || {})
......@@ -29,7 +62,7 @@ module.exports = (function() {
var values = {
table: QueryGenerator.addQuotes(tableName),
attributes: attrStr.join(", "),
attributes: attrStr.join(", ")
}
var pks = primaryKeys[tableName].map(function(pk){
......@@ -45,9 +78,10 @@ module.exports = (function() {
dropTableQuery: function(tableName, options) {
options = options || {}
var query = "DROP TABLE IF EXISTS <%= table %>;"
var query = "DROP TABLE IF EXISTS <%= table %><%= cascade %>;"
return Utils._.template(query)({
table: QueryGenerator.addQuotes(tableName)
table: QueryGenerator.addQuotes(tableName),
cascade: options.cascade? " CASCADE" : ""
})
},
......@@ -199,7 +233,7 @@ module.exports = (function() {
options.attributes = options.attributes || '*'
if (options.include) {
var optAttributes = [options.table + '.*']
var optAttributes = options.attributes === '*' ? [options.table + '.*'] : [options.attributes]
options.include.forEach(function(include) {
var attributes = Object.keys(include.daoFactory.attributes).map(function(attr) {
......@@ -224,18 +258,11 @@ module.exports = (function() {
options.attributes = optAttributes.join(', ')
}
if(options.where) {
if(options.hasOwnProperty('where')) {
options.where = QueryGenerator.getWhereConditions(options.where)
query += " WHERE <%= where %>"
}
if(options.order) {
options.order = options.order.replace(/([^ ]+)(.*)/, function(m, g1, g2) {
return QueryGenerator.addQuotes(g1) + g2
})
query += " ORDER BY <%= order %>"
}
if(options.group) {
if (Array.isArray(options.group)) {
options.group = options.group.map(function(grp){
......@@ -248,6 +275,13 @@ module.exports = (function() {
query += " GROUP BY <%= group %>"
}
if(options.order) {
options.order = options.order.replace(/([^ ]+)(.*)/, function(m, g1, g2) {
return QueryGenerator.addQuotes(g1) + g2
})
query += " ORDER BY <%= order %>"
}
if (!(options.include && (options.limit === 1))) {
if (options.limit) {
query += " LIMIT <%= limit %>"
......@@ -267,18 +301,7 @@ module.exports = (function() {
attrValueHash = Utils.removeNullValuesFromHash(attrValueHash, this.options.omitNull)
var query = "INSERT INTO <%= table %> (<%= attributes %>) VALUES (<%= values %>) RETURNING *;"
, returning = []
Utils._.forEach(attrValueHash, function(value, key, hash) {
if (tables[tableName] && tables[tableName][key]) {
switch (tables[tableName][key]) {
case 'serial':
delete hash[key]
returning.push(key)
break
}
}
});
, returning = removeSerialsFromHash(tableName, attrValueHash)
var replacements = {
table: QueryGenerator.addQuotes(tableName)
......@@ -293,6 +316,30 @@ module.exports = (function() {
return Utils._.template(query)(replacements)
},
bulkInsertQuery: function(tableName, attrValueHashes) {
var query = "INSERT INTO <%= table %> (<%= attributes %>) VALUES <%= tuples %> RETURNING *;"
, tuples = []
Utils._.forEach(attrValueHashes, function(attrValueHash) {
removeSerialsFromHash(tableName, attrValueHash)
tuples.push("(" +
Utils._.values(attrValueHash).map(function(value){
return QueryGenerator.pgEscape(value)
}).join(",") +
")")
})
var replacements = {
table: QueryGenerator.addQuotes(tableName)
, attributes: Object.keys(attrValueHashes[0]).map(function(attr){
return QueryGenerator.addQuotes(attr)
}).join(",")
, tuples: tuples.join(",")
}
return Utils._.template(query)(replacements)
},
updateQuery: function(tableName, attrValueHash, where) {
attrValueHash = Utils.removeNullValuesFromHash(attrValueHash, this.options.omitNull)
......@@ -315,11 +362,14 @@ module.exports = (function() {
deleteQuery: function(tableName, where, options) {
options = options || {}
options.limit = options.limit || 1
if(Utils._.isUndefined(options.limit)) {
options.limit = 1;
}
primaryKeys[tableName] = primaryKeys[tableName] || [];
var query = "DELETE FROM <%= table %> WHERE <%= primaryKeys %> IN (SELECT <%= primaryKeysSelection %> FROM <%= table %> WHERE <%= where %> LIMIT <%= limit %>)"
var query = "DELETE FROM <%= table %> WHERE <%= primaryKeys %> IN (SELECT <%= primaryKeysSelection %> FROM <%= table %> WHERE <%= where %><%= limit %>)"
var pks;
if (primaryKeys[tableName] && primaryKeys[tableName].length > 0) {
......@@ -333,7 +383,7 @@ module.exports = (function() {
var replacements = {
table: QueryGenerator.addQuotes(tableName),
where: QueryGenerator.getWhereConditions(where),
limit: QueryGenerator.pgEscape(options.limit),
limit: !!options.limit? " LIMIT " + QueryGenerator.pgEscape(options.limit) : "",
primaryKeys: primaryKeys[tableName].length > 1 ? '(' + pks + ')' : pks,
primaryKeysSelection: pks
}
......@@ -523,6 +573,28 @@ module.exports = (function() {
template += " PRIMARY KEY"
}
if(dataType.references) {
template += " REFERENCES <%= referencesTable %> (<%= referencesKey %>)"
replacements.referencesTable = QueryGenerator.addQuotes(dataType.references)
if(dataType.referencesKey) {
replacements.referencesKey = QueryGenerator.addQuotes(dataType.referencesKey)
} else {
replacements.referencesKey = QueryGenerator.addQuotes('id')
}
if(dataType.onDelete) {
template += " ON DELETE <%= onDeleteAction %>"
replacements.onDeleteAction = dataType.onDelete.toUpperCase()
}
if(dataType.onUpdate) {
template += " ON UPDATE <%= onUpdateAction %>"
replacements.onUpdateAction = dataType.onUpdate.toUpperCase()
}
}
result[name] = Utils._.template(template)(replacements)
} else {
result[name] = dataType
......@@ -546,8 +618,16 @@ module.exports = (function() {
return fields
},
enableForeignKeyConstraintsQuery: function() {
return false // not supported by dialect
},
disableForeignKeyConstraintsQuery: function() {
return false // not supported by dialect
},
databaseConnectionUri: function(config) {
var template = '<%= protocol %>://<%= user %>:<%= password %>@<%= host %><% if(port) { %>:<%= port %><% } %>/<%= database %>';
var template = '<%= protocol %>://<%= user %>:<%= password %>@<%= host %><% if(port) { %>:<%= port %><% } %>/<%= database %>'
return Utils._.template(template)({
user: encodeURIComponent(config.username),
......@@ -604,6 +684,21 @@ module.exports = (function() {
return "DROP TYPE IF EXISTS " + enumName + "; CREATE TYPE " + enumName + " AS " + dataType.match(/^ENUM\(.+\)/)[0] + "; "
},
toHstore: function(text) {
var obj = {}
, pattern = '("\\\\.|[^"\\\\]*"\s*=|[^=]*)\s*=\s*>\s*("(?:\\.|[^"\\\\])*"|[^,]*)(?:\s*,\s*|$)'
, rex = new RegExp(pattern,'g')
, r = null
while ((r = rex.exec(text)) !== null) {
if (!!r[1] && !!r[2]) {
obj[r[1].replace(/^"/, '').replace(/"$/, '')] = r[2].replace(/^"/, '').replace(/"$/, '')
}
}
return obj
},
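// For example (illustrative): toHstore('"theme"=>"dark","fontSize"=>"14"')
// should yield { theme: 'dark', fontSize: '14' }; note that values come back
// as strings.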
padInt: function (i) {
return (i < 10) ? '0' + i.toString() : i.toString()
},
......@@ -642,5 +737,22 @@ module.exports = (function() {
}
}
// Private
var removeSerialsFromHash = function(tableName, attrValueHash) {
var returning = [];
Utils._.forEach(attrValueHash, function(value, key, hash) {
if (tables[tableName] && tables[tableName][key]) {
switch (tables[tableName][key]) {
case 'serial':
delete hash[key]
returning.push(key)
break
}
}
});
return returning;
}
return Utils._.extend(Utils._.clone(require("../query-generator")), QueryGenerator)
})()
var Utils = require("../../utils")
, AbstractQuery = require('../abstract/query')
, DataTypes = require('../../data-types')
module.exports = (function() {
var Query = function(client, sequelize, callee, options) {
......@@ -107,17 +108,29 @@ module.exports = (function() {
} else if (this.send('isShowOrDescribeQuery')) {
this.emit('success', results)
} else if (this.send('isInsertQuery')) {
for (var key in rows[0]) {
if (rows[0].hasOwnProperty(key)) {
this.callee[key] = rows[0][key]
if(this.callee !== null) { // may happen for bulk inserts
for (var key in rows[0]) {
if (rows[0].hasOwnProperty(key)) {
var record = rows[0][key]
if (!!this.callee.daoFactory && !!this.callee.daoFactory.rawAttributes && !!this.callee.daoFactory.rawAttributes[key] && !!this.callee.daoFactory.rawAttributes[key].type && !!this.callee.daoFactory.rawAttributes[key].type.type && this.callee.daoFactory.rawAttributes[key].type.type === DataTypes.HSTORE.type) {
record = this.callee.daoFactory.daoFactoryManager.sequelize.queryInterface.QueryGenerator.toHstore(record)
}
this.callee[key] = record
}
}
}
this.emit('success', this.callee)
} else if (this.send('isUpdateQuery')) {
for (var key in rows[0]) {
if (rows[0].hasOwnProperty(key)) {
this.callee[key] = rows[0][key]
if(this.callee !== null) { // may happen for bulk updates
for (var key in rows[0]) {
if (rows[0].hasOwnProperty(key)) {
var record = rows[0][key]
if (!!this.callee.daoFactory && !!this.callee.daoFactory.rawAttributes && !!this.callee.daoFactory.rawAttributes[key] && !!this.callee.daoFactory.rawAttributes[key].type && !!this.callee.daoFactory.rawAttributes[key].type.type && this.callee.daoFactory.rawAttributes[key].type.type === DataTypes.HSTORE.type) {
record = this.callee.daoFactory.daoFactoryManager.sequelize.queryInterface.QueryGenerator.toHstore(record)
}
this.callee[key] = record
}
}
}
......
module.exports = (function() {
var QueryGenerator = {
addSchema: function(opts) {
throwMethodUndefined('addSchema')
},
/*
Returns a query for creating a table.
Parameters:
......@@ -115,6 +119,14 @@ module.exports = (function() {
},
/*
Returns an insert into command for multiple values.
Parameters: table name + list of hashes of attribute-value-pairs.
*/
bulkInsertQuery: function(tableName, attrValueHashes) {
throwMethodUndefined('bulkInsertQuery')
},
/*
Returns an update query.
Parameters:
- tableName -> Name of the table
......@@ -144,6 +156,19 @@ module.exports = (function() {
},
/*
Returns a bulk deletion query.
Parameters:
- tableName -> Name of the table
- where -> A hash with conditions (e.g. {name: 'foo'})
OR an ID as integer
OR a string with conditions (e.g. 'name="foo"').
If you use a string, you have to escape it on your own.
*/
bulkDeleteQuery: function(tableName, where, options) {
throwMethodUndefined('bulkDeleteQuery')
},
/*
Returns an update query.
Parameters:
- tableName -> Name of the table
......@@ -225,7 +250,22 @@ module.exports = (function() {
*/
findAutoIncrementField: function(factory) {
throwMethodUndefined('findAutoIncrementField')
},
/*
Globally enable foreign key constraints
*/
enableForeignKeyConstraintsQuery: function() {
throwMethodUndefined('enableForeignKeyConstraintsQuery')
},
/*
Globally disable foreign key constraints
*/
disableForeignKeyConstraintsQuery: function() {
throwMethodUndefined('disableForeignKeyConstraintsQuery')
}
}
var throwMethodUndefined = function(methodName) {
......
......@@ -5,7 +5,13 @@ var Utils = require("../../utils")
module.exports = (function() {
var ConnectorManager = function(sequelize) {
this.sequelize = sequelize
this.database = new sqlite3.Database(sequelize.options.storage || ':memory:')
this.database = db = new sqlite3.Database(sequelize.options.storage || ':memory:', function(err) {
if(!err && sequelize.options.foreignKeys !== false) {
// Make it possible to define and use foreign key constraints unless
// explicitly disallowed. It's still opt-in per relation
db.run('PRAGMA FOREIGN_KEYS=ON')
}
})
}
Utils._.extend(ConnectorManager.prototype, require("../connector-manager").prototype)
......
......@@ -24,6 +24,43 @@ module.exports = (function() {
var QueryGenerator = {
options: {},
addQuotes: function(s, quoteChar) {
return Utils.addTicks(s, quoteChar)
},
addSchema: function(opts) {
var tableName = undefined
var schema = (!!opts && !!opts.options && !!opts.options.schema ? opts.options.schema : undefined)
var schemaPrefix = (!!opts && !!opts.options && !!opts.options.schemaPrefix ? opts.options.schemaPrefix : undefined)
if (!!opts && !!opts.tableName) {
tableName = opts.tableName
}
else if (typeof opts === "string") {
tableName = opts
}
if (!schema || schema.toString().trim() === "") {
return tableName
}
return QueryGenerator.addQuotes(schema + (!schemaPrefix ? '.' : schemaPrefix) + tableName)
},
createSchema: function() {
var query = "SELECT name FROM sqlite_master WHERE type='table' and name!='sqlite_sequence';"
return Utils._.template(query)({})
},
dropSchema: function() {
var query = "SELECT name FROM sqlite_master WHERE type='table' and name!='sqlite_sequence';"
return Utils._.template(query)({})
},
showSchemasQuery: function() {
return "SELECT name FROM sqlite_master WHERE type='table' and name!='sqlite_sequence';"
},
createTableQuery: function(tableName, attributes, options) {
options = options || {}
......@@ -88,6 +125,27 @@ module.exports = (function() {
return Utils._.template(query)(replacements)
},
bulkInsertQuery: function(tableName, attrValueHashes) {
var query = "INSERT INTO <%= table %> (<%= attributes %>) VALUES <%= tuples %>;"
, tuples = []
Utils._.forEach(attrValueHashes, function(attrValueHash) {
tuples.push("(" +
Utils._.values(attrValueHash).map(function(value){
return escape((value instanceof Date) ? Utils.toSqlDate(value) : value)
}).join(",") +
")")
})
var replacements = {
table: Utils.addTicks(tableName),
attributes: Object.keys(attrValueHashes[0]).map(function(attr){return Utils.addTicks(attr)}).join(","),
tuples: tuples
}
return Utils._.template(query)(replacements)
},
updateQuery: function(tableName, attrValueHash, where) {
attrValueHash = Utils.removeNullValuesFromHash(attrValueHash, this.options.omitNull)
......@@ -114,8 +172,7 @@ module.exports = (function() {
var query = "DELETE FROM <%= table %> WHERE <%= where %>"
var replacements = {
table: Utils.addTicks(tableName),
where: this.getWhereConditions(where),
limit: Utils.escape(options.limit)
where: MySqlQueryGenerator.getWhereConditions(where)
}
return Utils._.template(query)(replacements)
......@@ -180,6 +237,28 @@ module.exports = (function() {
}
}
if(dataType.references) {
template += " REFERENCES <%= referencesTable %> (<%= referencesKey %>)"
replacements.referencesTable = Utils.addTicks(dataType.references)
if(dataType.referencesKey) {
replacements.referencesKey = Utils.addTicks(dataType.referencesKey)
} else {
replacements.referencesKey = Utils.addTicks('id')
}
if(dataType.onDelete) {
template += " ON DELETE <%= onDeleteAction %>"
replacements.onDeleteAction = dataType.onDelete.toUpperCase()
}
if(dataType.onUpdate) {
template += " ON UPDATE <%= onUpdateAction %>"
replacements.onUpdateAction = dataType.onUpdate.toUpperCase()
}
}
result[name] = Utils._.template(template)(replacements)
} else {
result[name] = dataType
......@@ -205,6 +284,16 @@ module.exports = (function() {
return fields
},
enableForeignKeyConstraintsQuery: function() {
var sql = "PRAGMA foreign_keys = ON;"
return Utils._.template(sql, {})
},
disableForeignKeyConstraintsQuery: function() {
var sql = "PRAGMA foreign_keys = OFF;"
return Utils._.template(sql, {})
},
hashToWhereConditions: function(hash) {
for (var key in hash) {
if (hash.hasOwnProperty(key)) {
......
......@@ -91,7 +91,10 @@ module.exports = (function() {
results = results.map(function(result) {
for (var name in result) {
if (result.hasOwnProperty(name) && (metaData.columnTypes[name] === 'DATETIME')) {
result[name] = new Date(result[name]+'Z'); // Z means UTC
var val = result[name];
if(val !== null) {
result[name] = new Date(val+'Z'); // Z means UTC
}
}
}
return result
......
var util = require("util")
, EventEmitter = require("events").EventEmitter
var util = require("util")
, EventEmitter = require("events").EventEmitter
, proxyEventKeys = ['success', 'error', 'sql']
module.exports = (function() {
var CustomEventEmitter = function(fct) {
......@@ -40,6 +41,14 @@ module.exports = (function() {
return this
}
CustomEventEmitter.prototype.proxy = function(emitter) {
proxyEventKeys.forEach(function (eventKey) {
this.on(eventKey, function (result) {
emitter.emit(eventKey, result)
})
}.bind(this))
}
return CustomEventEmitter;
})()
......@@ -12,7 +12,8 @@ module.exports = (function() {
path: __dirname + '/../migrations',
from: null,
to: null,
logging: console.log
logging: console.log,
filesFilter: /\.js$/
}, options || {})
if (this.options.logging === true) {
......@@ -52,16 +53,26 @@ module.exports = (function() {
migrations.reverse()
}
if (migrations.length === 0) {
self.options.logging("There are no pending migrations.")
} else {
self.options.logging("Running migrations...")
}
migrations.forEach(function(migration) {
var migrationTime
chainer.add(migration, 'execute', [options], {
before: function(migration) {
if (self.options.logging !== false) {
self.options.logging('Executing migration: ' + migration.filename)
self.options.logging(migration.filename)
}
migrationTime = process.hrtime()
},
after: function(migration) {
migrationTime = process.hrtime(migrationTime)
migrationTime = Math.round( (migrationTime[0] * 1000) + (migrationTime[1] / 1000000));
if (self.options.logging !== false) {
self.options.logging('Executed migration: ' + migration.filename)
self.options.logging('Completed in ' + migrationTime + 'ms')
}
},
success: function(migration, callback) {
......@@ -96,7 +107,7 @@ module.exports = (function() {
}
var migrationFiles = fs.readdirSync(this.options.path).filter(function(file) {
return /\.js$/.test(file)
return self.options.filesFilter.test(file)
})
var migrations = migrationFiles.map(function(file) {
......
......@@ -10,6 +10,58 @@ module.exports = (function() {
}
Utils.addEventEmitter(QueryInterface)
QueryInterface.prototype.createSchema = function(schema) {
var sql = this.QueryGenerator.createSchema(schema)
return queryAndEmit.call(this, sql, 'createSchema')
}
QueryInterface.prototype.dropSchema = function(schema) {
var sql = this.QueryGenerator.dropSchema(schema)
return queryAndEmit.call(this, sql, 'dropSchema')
}
QueryInterface.prototype.dropAllSchemas = function() {
var self = this
return new Utils.CustomEventEmitter(function(emitter) {
var chainer = new Utils.QueryChainer()
self.showAllSchemas().success(function(schemaNames) {
schemaNames.forEach(function(schemaName) {
chainer.add(self.dropSchema(schemaName))
})
chainer
.run()
.success(function() {
self.emit('dropAllSchemas', null)
emitter.emit('success', null)
})
.error(function(err) {
self.emit('dropAllSchemas', err)
emitter.emit('error', err)
})
}).error(function(err) {
self.emit('dropAllSchemas', err)
emitter.emit('error', err)
})
}).run()
}
QueryInterface.prototype.showAllSchemas = function() {
var self = this
return new Utils.CustomEventEmitter(function(emitter) {
var showSchemasSql = self.QueryGenerator.showSchemasQuery()
self.sequelize.query(showSchemasSql, null, { raw: true }).success(function(schemaNames) {
self.emit('showAllSchemas', null)
emitter.emit('success', Utils._.flatten(Utils._.map(schemaNames, function(value){ return value.schema_name })))
}).error(function(err) {
self.emit('showAllSchemas', err)
emitter.emit('error', err)
})
}).run()
}
QueryInterface.prototype.createTable = function(tableName, attributes, options) {
var attributeHashes = {}
......@@ -27,8 +79,8 @@ module.exports = (function() {
return queryAndEmit.call(this, sql, 'createTable')
}
QueryInterface.prototype.dropTable = function(tableName) {
var sql = this.QueryGenerator.dropTableQuery(tableName)
QueryInterface.prototype.dropTable = function(tableName, options) {
var sql = this.QueryGenerator.dropTableQuery(tableName, options)
return queryAndEmit.call(this, sql, 'dropTable')
}
......@@ -39,11 +91,17 @@ module.exports = (function() {
var chainer = new Utils.QueryChainer()
self.showAllTables().success(function(tableNames) {
chainer.add(self, 'disableForeignKeyConstraints', [])
tableNames.forEach(function(tableName) {
chainer.add(self.dropTable(tableName))
chainer.add(self, 'dropTable', [tableName, {cascade: true}])
})
chainer.add(self, 'enableForeignKeyConstraints', [])
chainer
.run()
.runSerially()
.success(function() {
self.emit('dropAllTables', null)
emitter.emit('success', null)
......@@ -202,16 +260,31 @@ module.exports = (function() {
})
}
QueryInterface.prototype.bulkInsert = function(tableName, records) {
var sql = this.QueryGenerator.bulkInsertQuery(tableName, records)
return queryAndEmit.call(this, sql, 'bulkInsert')
}
QueryInterface.prototype.update = function(dao, tableName, values, identifier) {
var sql = this.QueryGenerator.updateQuery(tableName, values, identifier)
return queryAndEmit.call(this, [sql, dao], 'update')
}
QueryInterface.prototype.bulkUpdate = function(tableName, values, identifier) {
var sql = this.QueryGenerator.updateQuery(tableName, values, identifier)
return queryAndEmit.call(this, sql, 'bulkUpdate')
}
QueryInterface.prototype.delete = function(dao, tableName, identifier) {
var sql = this.QueryGenerator.deleteQuery(tableName, identifier)
return queryAndEmit.call(this, [sql, dao], 'delete')
}
QueryInterface.prototype.bulkDelete = function(tableName, identifier) {
var sql = this.QueryGenerator.deleteQuery(tableName, identifier, {limit: null})
return queryAndEmit.call(this, sql, 'bulkDelete')
}
QueryInterface.prototype.select = function(factory, tableName, options, queryOptions) {
options = options || {}
......@@ -244,6 +317,10 @@ module.exports = (function() {
result = parseInt(result)
}
if (options && options.parseFloat) {
result = parseFloat(result)
}
self.emit('rawSelect', null)
emitter.emit('success', result)
})
......@@ -257,6 +334,30 @@ module.exports = (function() {
}).run()
}
QueryInterface.prototype.enableForeignKeyConstraints = function() {
var sql = this.QueryGenerator.enableForeignKeyConstraintsQuery()
if(sql) {
return queryAndEmit.call(this, sql, 'enableForeignKeyConstraints')
} else {
return new Utils.CustomEventEmitter(function(emitter) {
this.emit('enableForeignKeyConstraints', null)
emitter.emit('success')
}).run()
}
}
QueryInterface.prototype.disableForeignKeyConstraints = function() {
var sql = this.QueryGenerator.disableForeignKeyConstraintsQuery()
if(sql){
return queryAndEmit.call(this, sql, 'disableForeignKeyConstraints')
} else {
return new Utils.CustomEventEmitter(function(emitter) {
this.emit('disableForeignKeyConstraints', null)
emitter.emit('success')
}).run()
}
}
// private
var queryAndEmit = function(sqlOrQueryParams, methodName, options, emitter) {
......
var Utils = require("./utils")
var url = require("url")
, Utils = require("./utils")
, DAOFactory = require("./dao-factory")
, DataTypes = require('./data-types')
, DAOFactoryManager = require("./dao-factory-manager")
......@@ -43,6 +44,27 @@ module.exports = (function() {
@constructor
*/
var Sequelize = function(database, username, password, options) {
var urlParts
options = options || {}
if (arguments.length === 1 || (arguments.length === 2 && typeof username === 'object')) {
options = username || {}
urlParts = url.parse(arguments[0])
database = urlParts.path.replace(/^\//, '')
dialect = urlParts.protocol
options.dialect = urlParts.protocol.replace(/:$/, '')
options.host = urlParts.hostname
if (urlParts.port) {
options.port = urlParts.port
}
if (urlParts.auth) {
username = urlParts.auth.split(':')[0]
password = urlParts.auth.split(':')[1]
}
}
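// Illustrative usage of the connection-URI form (credentials are placeholders):
//
//   var sequelize = new Sequelize('postgres://user:pass@localhost:5432/mydb')
//
// which is equivalent to
//
//   new Sequelize('mydb', 'user', 'pass', { dialect: 'postgres', host: 'localhost', port: '5432' })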
this.options = Utils._.extend({
dialect: 'mysql',
host: 'localhost',
......@@ -130,6 +152,16 @@ module.exports = (function() {
options = options || {}
var globalOptions = this.options
// If you don't specify a valid data type lets help you debug it
Utils._.each(attributes, function(dataType, name){
if (Utils.isHash(dataType)) {
dataType = dataType.type
}
if (dataType === undefined) {
throw new Error('Unrecognized data type for field '+ name)
}
})
if (globalOptions.define) {
options = Utils._.extend({}, globalOptions.define, options)
Utils._(['classMethods', 'instanceMethods']).each(function(key) {
......@@ -171,7 +203,7 @@ module.exports = (function() {
Sequelize.prototype.query = function(sql, callee, options, replacements) {
if (arguments.length === 4) {
sql = Utils.format([sql].concat(replacements))
sql = Utils.format([sql].concat(replacements), this.options.dialect)
} else if (arguments.length === 3) {
options = options
} else if (arguments.length === 2) {
......@@ -189,6 +221,38 @@ module.exports = (function() {
return this.connectorManager.query(sql, callee, options)
}
Sequelize.prototype.createSchema = function(schema) {
var chainer = new Utils.QueryChainer()
chainer.add(this.getQueryInterface().createSchema(schema))
return chainer.run()
}
Sequelize.prototype.showAllSchemas = function() {
var chainer = new Utils.QueryChainer()
chainer.add(this.getQueryInterface().showAllSchemas())
return chainer.run()
}
Sequelize.prototype.dropSchema = function(schema) {
var chainer = new Utils.QueryChainer()
chainer.add(this.getQueryInterface().dropSchema(schema))
return chainer.run()
}
Sequelize.prototype.dropAllSchemas = function() {
var self = this
var chainer = new Utils.QueryChainer()
chainer.add(self.getQueryInterface().dropAllSchemas())
return chainer.run()
}
Sequelize.prototype.sync = function(options) {
options = options || {}
......@@ -198,11 +262,14 @@ module.exports = (function() {
var chainer = new Utils.QueryChainer()
this.daoFactoryManager.daos.forEach(function(dao) {
chainer.add(dao.sync(options))
// Topologically sort by foreign key constraints to give us an appropriate
// creation order
this.daoFactoryManager.forEachDAO(function(dao) {
chainer.add(dao, 'sync', [options])
})
return chainer.run()
return chainer.runSerially()
}
Sequelize.prototype.drop = function() {
......
......@@ -7,7 +7,7 @@ SqlString.escapeId = function (val, forbidQualified) {
return '`' + val.replace(/`/g, '``').replace(/\./g, '`.`') + '`';
};
SqlString.escape = function(val, stringifyObjects, timeZone) {
SqlString.escape = function(val, stringifyObjects, timeZone, dialect) {
if (val === undefined || val === null) {
return 'NULL';
}
......@@ -37,17 +37,22 @@ SqlString.escape = function(val, stringifyObjects, timeZone) {
}
}
val = val.replace(/[\0\n\r\b\t\\\'\"\x1a]/g, function(s) {
switch(s) {
case "\0": return "\\0";
case "\n": return "\\n";
case "\r": return "\\r";
case "\b": return "\\b";
case "\t": return "\\t";
case "\x1a": return "\\Z";
default: return "\\"+s;
}
});
if (dialect == "postgres") {
// http://www.postgresql.org/docs/8.2/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS
val = val.replace(/'/g, "''");
} else {
val = val.replace(/[\0\n\r\b\t\\\'\"\x1a]/g, function(s) {
switch(s) {
case "\0": return "\\0";
case "\n": return "\\n";
case "\r": return "\\r";
case "\b": return "\\b";
case "\t": return "\\t";
case "\x1a": return "\\Z";
default: return "\\"+s;
}
});
}
return "'"+val+"'";
};
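
The practical effect of the new `dialect` argument, shown on the injection-style string used in the query-generator specs below (assuming `SqlString` is the module defined in this file; `null` stands in for the unused time zone):

    SqlString.escape("foo';DROP TABLE myTable;", false, null, 'mysql')
    // => 'foo\';DROP TABLE myTable;'        (backslash escaping)

    SqlString.escape("foo';DROP TABLE myTable;", false, null, 'postgres')
    // => 'foo'';DROP TABLE myTable;'        (quotes doubled, no backslashes)
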
......@@ -58,7 +63,7 @@ SqlString.arrayToList = function(array, timeZone) {
}).join(', ');
};
SqlString.format = function(sql, values, timeZone) {
SqlString.format = function(sql, values, timeZone, dialect) {
values = [].concat(values);
return sql.replace(/\?/g, function(match) {
......@@ -66,7 +71,7 @@ SqlString.format = function(sql, values, timeZone) {
return match;
}
return SqlString.escape(values.shift(), false, timeZone);
return SqlString.escape(values.shift(), false, timeZone, dialect);
});
};
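
Combined with the change in `Sequelize#query` above, replacement values are now escaped with the active dialect's rules. A small sketch (the table name and value are arbitrary):

    SqlString.format('SELECT * FROM users WHERE name = ?', ["O'Brien"], null, 'postgres')
    // => "SELECT * FROM users WHERE name = 'O''Brien'"

    SqlString.format('SELECT * FROM users WHERE name = ?', ["O'Brien"], null, 'mysql')
    // => "SELECT * FROM users WHERE name = 'O\'Brien'"
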
......
......@@ -4,7 +4,7 @@ var util = require("util")
var Utils = module.exports = {
_: (function() {
var _ = require("underscore")
var _ = require("lodash")
, _s = require('underscore.string')
_.mixin(_s.exports())
......@@ -47,8 +47,9 @@ var Utils = module.exports = {
escape: function(s) {
return SqlString.escape(s, true, "local").replace(/\\"/g, '"')
},
format: function(arr) {
return SqlString.format(arr.shift(), arr)
format: function(arr, dialect) {
var timeZone = null;
return SqlString.format(arr.shift(), arr, timeZone, dialect)
},
isHash: function(obj) {
return Utils._.isObject(obj) && !Array.isArray(obj);
......
{
"name": "sequelize",
"description": "Multi dialect ORM for Node.JS",
"version": "1.6.0",
"version": "1.7.0-alpha1",
"author": "Sascha Depold <sascha@depold.com>",
"contributors": [
{
......@@ -23,23 +23,24 @@
],
"repository": {
"type": "git",
"url": "https://github.com/sdepold/sequelize.git"
"url": "https://github.com/sequelize/sequelize.git"
},
"bugs": {
"url": "https://github.com/sdepold/sequelize/issues"
"url": "https://github.com/sequelize/sequelize/issues"
},
"dependencies": {
"underscore": "~1.4.0",
"lodash": "~1.2.1",
"underscore.string": "~2.3.0",
"lingo": "~0.0.5",
"validator": "0.4.x",
"validator": "1.1.1",
"moment": "~1.7.0",
"commander": "~0.6.0",
"generic-pool": "1.0.9",
"dottie": "0.0.6-1"
"dottie": "0.0.6-1",
"toposort-class": "0.1.4"
},
"devDependencies": {
"jasmine-node": "1.0.17",
"jasmine-node": "1.5.0",
"sqlite3": "~2.1.5",
"mysql": "~2.0.0-alpha7",
"pg": "~0.10.2",
......@@ -56,13 +57,13 @@
"main": "index",
"scripts": {
"test": "npm run test-jasmine && npm run test-buster",
"test-jasmine": "./node_modules/.bin/jasmine-node spec-jasmine/",
"test-jasmine": "jasmine-node spec-jasmine/",
"test-buster": "npm run test-buster-mysql && npm run test-buster-postgres && npm run test-buster-postgres-native && npm run test-buster-sqlite",
"test-buster-travis": "./node_modules/.bin/buster-test",
"test-buster-mysql": "DIALECT=mysql ./node_modules/.bin/buster-test",
"test-buster-postgres": "DIALECT=postgres ./node_modules/.bin/buster-test",
"test-buster-postgres-native": "DIALECT=postgres-native ./node_modules/.bin/buster-test",
"test-buster-sqlite": "DIALECT=sqlite ./node_modules/.bin/buster-test",
"test-buster-travis": "buster-test",
"test-buster-mysql": "DIALECT=mysql buster-test",
"test-buster-postgres": "DIALECT=postgres buster-test",
"test-buster-postgres-native": "DIALECT=postgres-native buster-test",
"test-buster-sqlite": "DIALECT=sqlite buster-test",
"docs": "node_modules/.bin/yuidoc . -o docs"
},
"bin": {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('BelongsTo', function() {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('HasMany', function() {
......@@ -10,7 +10,7 @@ describe('HasMany', function() {
, Helpers = null
var setup = function() {
sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { logging: false })
sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
Helpers = new (require("../config/helpers"))(sequelize)
Helpers.dropAllTables()
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('HasOne', function() {
......
......@@ -90,25 +90,6 @@ describe('DAOFactory', function() {
})
})
it('marks the database entry as deleted if dao is paranoid', function() {
Helpers.async(function(done) {
User = sequelize.define('User', {
name: Sequelize.STRING, bio: Sequelize.TEXT
}, { paranoid:true })
User.sync({ force: true }).success(done)
})
Helpers.async(function(done) {
User.create({ name: 'asd', bio: 'asd' }).success(function(u) {
expect(u.deletedAt).toBeNull()
u.destroy().success(function(u) {
expect(u.deletedAt).toBeTruthy()
done()
})
})
})
})
it('allows sql logging of update statements', function() {
Helpers.async(function(done) {
User = sequelize.define('User', {
......
......@@ -13,7 +13,8 @@ describe('DAO', function() {
{
logging: false,
dialect: dialect,
port: config[dialect].port
port: config[dialect].port,
host: config[dialect].host
}
)
, Helpers = new (require("./config/helpers"))(sequelize)
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('HasMany', function() {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('Associations', function() {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('ConnectorManager', function() {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
describe('DAOFactory', function() {
......
var config = require("../config/config")
, Sequelize = require("../../index")
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false })
, sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, { pool: config.mysql.pool, logging: false, host: config.mysql.host, port: config.mysql.port })
, Helpers = new (require("../config/helpers"))(sequelize)
, QueryGenerator = require("../../lib/dialects/mysql/query-generator")
, util = require("util")
......@@ -10,6 +10,62 @@ describe('QueryGenerator', function() {
afterEach(function() { Helpers.drop() })
var suites = {
attributesToSQL: [
{
arguments: [{id: 'INTEGER'}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: 'INTEGER', foo: 'VARCHAR(255)'}],
expectation: {id: 'INTEGER', foo: 'VARCHAR(255)'}
},
{
arguments: [{id: {type: 'INTEGER'}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false}}],
expectation: {id: 'INTEGER NOT NULL'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: true}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', primaryKey: true, autoIncrement: true}}],
expectation: {id: 'INTEGER auto_increment PRIMARY KEY'}
},
{
arguments: [{id: {type: 'INTEGER', defaultValue: 0}}],
expectation: {id: 'INTEGER DEFAULT 0'}
},
{
arguments: [{id: {type: 'INTEGER', unique: true}}],
expectation: {id: 'INTEGER UNIQUE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`)'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', referencesKey: 'pk'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`pk`)'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onDelete: 'CASCADE'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON DELETE CASCADE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON UPDATE RESTRICT'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false, autoIncrement: true, defaultValue: 1, references: 'Bar', onDelete: 'CASCADE', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER NOT NULL auto_increment DEFAULT 1 REFERENCES `Bar` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT'}
},
],
createTableQuery: [
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)'}],
......@@ -26,6 +82,14 @@ describe('QueryGenerator', function() {
{
arguments: ['myTable', {title: 'ENUM("A", "B", "C")', name: 'VARCHAR(255)'}, {charset: 'latin1'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` ENUM(\"A\", \"B\", \"C\"), `name` VARCHAR(255)) ENGINE=InnoDB DEFAULT CHARSET=latin1;"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', id: 'INTEGER PRIMARY KEY'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `id` INTEGER , PRIMARY KEY (`id`)) ENGINE=InnoDB;"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', otherId: 'INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `otherId` INTEGER, FOREIGN KEY (`otherId`) REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION) ENGINE=InnoDB;"
}
],
......@@ -102,6 +166,26 @@ describe('QueryGenerator', function() {
arguments: ['myTable', {offset: 2}],
expectation: "SELECT * FROM `myTable`;",
context: QueryGenerator
}, {
title: 'multiple where arguments',
arguments: ['myTable', {where: {boat: 'canoe', weather: 'cold'}}],
expectation: "SELECT * FROM `myTable` WHERE `myTable`.`boat`='canoe' AND `myTable`.`weather`='cold';",
context: QueryGenerator
}, {
title: 'no where arguments (object)',
arguments: ['myTable', {where: {}}],
expectation: "SELECT * FROM `myTable` WHERE 1=1;",
context: QueryGenerator
}, {
title: 'no where arguments (string)',
arguments: ['myTable', {where: ''}],
expectation: "SELECT * FROM `myTable` WHERE 1=1;",
context: QueryGenerator
}, {
title: 'no where arguments (null)',
arguments: ['myTable', {where: null}],
expectation: "SELECT * FROM `myTable` WHERE 1=1;",
context: QueryGenerator
}
],
......@@ -133,6 +217,46 @@ describe('QueryGenerator', function() {
arguments: ['myTable', {name: 'foo', foo: 1, nullValue: undefined}],
expectation: "INSERT INTO `myTable` (`name`,`foo`) VALUES ('foo',1);",
context: {options: {omitNull: true}}
}, {
arguments: ['myTable', {foo: false}],
expectation: "INSERT INTO `myTable` (`foo`) VALUES (0);"
}, {
arguments: ['myTable', {foo: true}],
expectation: "INSERT INTO `myTable` (`foo`) VALUES (1);"
}
],
bulkInsertQuery: [
{
arguments: ['myTable', [{name: 'foo'}, {name: 'bar'}]],
expectation: "INSERT INTO `myTable` (`name`) VALUES ('foo'),('bar');"
}, {
arguments: ['myTable', [{name: "foo';DROP TABLE myTable;"}, {name: 'bar'}]],
expectation: "INSERT INTO `myTable` (`name`) VALUES ('foo\\';DROP TABLE myTable;'),('bar');"
}, {
arguments: ['myTable', [{name: 'foo', birthday: new Date(Date.UTC(2011, 2, 27, 10, 1, 55))}, {name: 'bar', birthday: new Date(Date.UTC(2012, 2, 27, 10, 1, 55))}]],
expectation: "INSERT INTO `myTable` (`name`,`birthday`) VALUES ('foo','2011-03-27 10:01:55'),('bar','2012-03-27 10:01:55');"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1}, {name: 'bar', foo: 2}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`) VALUES ('foo',1),('bar',2);"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',NULL);"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: false}}
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: true}} // Note: We don't honour this because it makes little sense when some rows may have nulls and others not
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: undefined}, {name: 'bar', foo: 2, undefinedValue: undefined}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: true}} // Note: As above
}, {
arguments: ['myTable', [{name: "foo", value: true}, {name: 'bar', value: false}]],
expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',1),('bar',0);"
}
],
......@@ -160,6 +284,12 @@ describe('QueryGenerator', function() {
arguments: ['myTable', {bar: 2, nullValue: null}, {name: 'foo'}],
expectation: "UPDATE `myTable` SET `bar`=2 WHERE `name`='foo'",
context: {options: {omitNull: true}}
}, {
arguments: ['myTable', {bar: false}, {name: 'foo'}],
expectation: "UPDATE `myTable` SET `bar`=0 WHERE `name`='foo'"
}, {
arguments: ['myTable', {bar: true}, {name: 'foo'}],
expectation: "UPDATE `myTable` SET `bar`=1 WHERE `name`='foo'"
}
],
......@@ -176,6 +306,9 @@ describe('QueryGenerator', function() {
}, {
arguments: ['myTable', {name: "foo';DROP TABLE myTable;"}, {limit: 10}],
expectation: "DELETE FROM `myTable` WHERE `name`='foo\\';DROP TABLE myTable;' LIMIT 10"
}, {
arguments: ['myTable', {name: 'foo'}, {limit: null}],
expectation: "DELETE FROM `myTable` WHERE `name`='foo'"
}
],
......@@ -227,6 +360,27 @@ describe('QueryGenerator', function() {
{
arguments: [{ id: [] }],
expectation: "`id` IN (NULL)"
},
{
arguments: [{ maple: false, bacon: true }],
expectation: "`maple`=0 AND `bacon`=1"
},
{
arguments: [{ beaver: [false, true] }],
expectation: "`beaver` IN (0,1)"
},
{
arguments: [{birthday: new Date(Date.UTC(2011, 6, 1, 10, 1, 55))}],
expectation: "`birthday`='2011-07-01 10:01:55'"
},
{
arguments: [{ birthday: new Date(Date.UTC(2011, 6, 1, 10, 1, 55)),
otherday: new Date(Date.UTC(2013, 6, 2, 10, 1, 22)) }],
expectation: "`birthday`='2011-07-01 10:01:55' AND `otherday`='2013-07-02 10:01:22'"
},
{
arguments: [{ birthday: [new Date(Date.UTC(2011, 6, 1, 10, 1, 55)), new Date(Date.UTC(2013, 6, 2, 10, 1, 22))] }],
expectation: "`birthday` IN ('2011-07-01 10:01:55','2013-07-02 10:01:22')"
}
]
}
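
Taken together, the new `attributesToSQL` and `createTableQuery` expectations describe how the foreign-key options travel into DDL. A sketch of the generator-level call these suites exercise, using the same `QueryGenerator` required at the top of this spec:

    QueryGenerator.attributesToSQL({
      otherId: { type: 'INTEGER', references: 'otherTable', referencesKey: 'id', onDelete: 'CASCADE' }
    })
    // => { otherId: 'INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE' }

    // createTableQuery then rewrites that REFERENCES fragment into an
    // inline FOREIGN KEY clause, as in the expectations above.
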
......
......@@ -14,6 +14,62 @@ describe('QueryGenerator', function() {
afterEach(function() { Helpers.drop() })
var suites = {
attributesToSQL: [
{
arguments: [{id: 'INTEGER'}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: 'INTEGER', foo: 'VARCHAR(255)'}],
expectation: {id: 'INTEGER', foo: 'VARCHAR(255)'}
},
{
arguments: [{id: {type: 'INTEGER'}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false}}],
expectation: {id: 'INTEGER NOT NULL'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: true}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', primaryKey: true, autoIncrement: true}}],
expectation: {id: 'INTEGER SERIAL PRIMARY KEY'}
},
{
arguments: [{id: {type: 'INTEGER', defaultValue: 0}}],
expectation: {id: 'INTEGER DEFAULT 0'}
},
{
arguments: [{id: {type: 'INTEGER', unique: true}}],
expectation: {id: 'INTEGER UNIQUE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar'}}],
expectation: {id: 'INTEGER REFERENCES "Bar" ("id")'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', referencesKey: 'pk'}}],
expectation: {id: 'INTEGER REFERENCES "Bar" ("pk")'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onDelete: 'CASCADE'}}],
expectation: {id: 'INTEGER REFERENCES "Bar" ("id") ON DELETE CASCADE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER REFERENCES "Bar" ("id") ON UPDATE RESTRICT'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false, defaultValue: 1, references: 'Bar', onDelete: 'CASCADE', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER NOT NULL DEFAULT 1 REFERENCES "Bar" ("id") ON DELETE CASCADE ON UPDATE RESTRICT'}
},
],
createTableQuery: [
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)'}],
......@@ -26,6 +82,14 @@ describe('QueryGenerator', function() {
{
arguments: ['myTable', {title: 'ENUM("A", "B", "C")', name: 'VARCHAR(255)'}],
expectation: "DROP TYPE IF EXISTS \"enum_myTable_title\"; CREATE TYPE \"enum_myTable_title\" AS ENUM(\"A\", \"B\", \"C\"); CREATE TABLE IF NOT EXISTS \"myTable\" (\"title\" \"enum_myTable_title\", \"name\" VARCHAR(255));"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', id: 'INTEGER PRIMARY KEY'}],
expectation: "CREATE TABLE IF NOT EXISTS \"myTable\" (\"title\" VARCHAR(255), \"name\" VARCHAR(255), \"id\" INTEGER , PRIMARY KEY (\"id\"));"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', otherId: 'INTEGER REFERENCES "otherTable" ("id") ON DELETE CASCADE ON UPDATE NO ACTION'}],
expectation: "CREATE TABLE IF NOT EXISTS \"myTable\" (\"title\" VARCHAR(255), \"name\" VARCHAR(255), \"otherId\" INTEGER REFERENCES \"otherTable\" (\"id\") ON DELETE CASCADE ON UPDATE NO ACTION);"
}
],
......@@ -37,6 +101,14 @@ describe('QueryGenerator', function() {
{
arguments: ['mySchema.myTable'],
expectation: "DROP TABLE IF EXISTS \"mySchema\".\"myTable\";"
},
{
arguments: ['myTable', {cascade: true}],
expectation: "DROP TABLE IF EXISTS \"myTable\" CASCADE;"
},
{
arguments: ['mySchema.myTable', {cascade: true}],
expectation: "DROP TABLE IF EXISTS \"mySchema\".\"myTable\" CASCADE;"
}
],
......@@ -136,6 +208,46 @@ describe('QueryGenerator', function() {
}
],
bulkInsertQuery: [
{
arguments: ['myTable', [{name: 'foo'}, {name: 'bar'}]],
expectation: "INSERT INTO \"myTable\" (\"name\") VALUES ('foo'),('bar') RETURNING *;"
}, {
arguments: ['myTable', [{name: "foo';DROP TABLE myTable;"}, {name: 'bar'}]],
expectation: "INSERT INTO \"myTable\" (\"name\") VALUES ('foo'';DROP TABLE myTable;'),('bar') RETURNING *;"
}, {
arguments: ['myTable', [{name: 'foo', birthday: new Date(Date.UTC(2011, 2, 27, 10, 1, 55))}, {name: 'bar', birthday: new Date(Date.UTC(2012, 2, 27, 10, 1, 55))}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"birthday\") VALUES ('foo','2011-03-27 10:01:55.0Z'),('bar','2012-03-27 10:01:55.0Z') RETURNING *;"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1}, {name: 'bar', foo: 2}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"foo\") VALUES ('foo',1),('bar',2) RETURNING *;"
}, {
arguments: ['myTable', [{name: 'foo', nullValue: null}, {name: 'bar', nullValue: null}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"nullValue\") VALUES ('foo',NULL),('bar',NULL) RETURNING *;"
}, {
arguments: ['myTable', [{name: 'foo', nullValue: null}, {name: 'bar', nullValue: null}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"nullValue\") VALUES ('foo',NULL),('bar',NULL) RETURNING *;",
context: {options: {omitNull: false}}
}, {
arguments: ['myTable', [{name: 'foo', nullValue: null}, {name: 'bar', nullValue: null}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"nullValue\") VALUES ('foo',NULL),('bar',NULL) RETURNING *;",
context: {options: {omitNull: true}} // Note: We don't honour this because it makes little sense when some rows may have nulls and others not
}, {
arguments: ['myTable', [{name: 'foo', nullValue: undefined}, {name: 'bar', nullValue: undefined}]],
expectation: "INSERT INTO \"myTable\" (\"name\",\"nullValue\") VALUES ('foo',NULL),('bar',NULL) RETURNING *;",
context: {options: {omitNull: true}} // Note: As above
}, {
arguments: ['mySchema.myTable', [{name: 'foo'}, {name: 'bar'}]],
expectation: "INSERT INTO \"mySchema\".\"myTable\" (\"name\") VALUES ('foo'),('bar') RETURNING *;"
}, {
arguments: ['mySchema.myTable', [{name: JSON.stringify({info: 'Look ma a " quote'})}, {name: JSON.stringify({info: 'Look ma another " quote'})}]],
expectation: "INSERT INTO \"mySchema\".\"myTable\" (\"name\") VALUES ('{\"info\":\"Look ma a \\\" quote\"}'),('{\"info\":\"Look ma another \\\" quote\"}') RETURNING *;"
}, {
arguments: ['mySchema.myTable', [{name: "foo';DROP TABLE mySchema.myTable;"}, {name: 'bar'}]],
expectation: "INSERT INTO \"mySchema\".\"myTable\" (\"name\") VALUES ('foo'';DROP TABLE mySchema.myTable;'),('bar') RETURNING *;"
}
],
updateQuery: [
{
arguments: ['myTable', {name: 'foo', birthday: new Date(Date.UTC(2011, 2, 27, 10, 1, 55))}, {id: 2}],
......@@ -192,6 +304,9 @@ describe('QueryGenerator', function() {
}, {
arguments: ['mySchema.myTable', {name: "foo';DROP TABLE mySchema.myTable;"}, {limit: 10}],
expectation: "DELETE FROM \"mySchema\".\"myTable\" WHERE \"id\" IN (SELECT \"id\" FROM \"mySchema\".\"myTable\" WHERE \"name\"='foo'';DROP TABLE mySchema.myTable;' LIMIT 10)"
}, {
arguments: ['myTable', {name: 'foo'}, {limit: null}],
expectation: "DELETE FROM \"myTable\" WHERE \"id\" IN (SELECT \"id\" FROM \"myTable\" WHERE \"name\"='foo')"
}
],
......
......@@ -8,7 +8,17 @@ describe('Sequelize', function() {
var setup = function(options) {
options = options || {logging: false}
options = options || {}
if (!options.hasOwnProperty('pool'))
options.pool = config.mysql.pool
if (!options.hasOwnProperty('logging'))
options.logging = false
if (!options.hasOwnProperty('host'))
options.host = config.mysql.host
if (!options.hasOwnProperty('port'))
options.port = config.mysql.port
sequelize = new Sequelize(config.mysql.database, config.mysql.username, config.mysql.password, options)
Helpers = new (require("./config/helpers"))(sequelize)
......
......@@ -9,6 +9,81 @@ describe('QueryGenerator', function() {
afterEach(function() { Helpers.drop() })
var suites = {
attributesToSQL: [
{
arguments: [{id: 'INTEGER'}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: 'INTEGER', foo: 'VARCHAR(255)'}],
expectation: {id: 'INTEGER', foo: 'VARCHAR(255)'}
},
{
arguments: [{id: {type: 'INTEGER'}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false}}],
expectation: {id: 'INTEGER NOT NULL'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: true}}],
expectation: {id: 'INTEGER'}
},
{
arguments: [{id: {type: 'INTEGER', primaryKey: true, autoIncrement: true}}],
expectation: {id: 'INTEGER PRIMARY KEY AUTOINCREMENT'}
},
{
arguments: [{id: {type: 'INTEGER', defaultValue: 0}}],
expectation: {id: 'INTEGER DEFAULT 0'}
},
{
arguments: [{id: {type: 'INTEGER', unique: true}}],
expectation: {id: 'INTEGER UNIQUE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`)'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', referencesKey: 'pk'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`pk`)'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onDelete: 'CASCADE'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON DELETE CASCADE'}
},
{
arguments: [{id: {type: 'INTEGER', references: 'Bar', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON UPDATE RESTRICT'}
},
{
arguments: [{id: {type: 'INTEGER', allowNull: false, defaultValue: 1, references: 'Bar', onDelete: 'CASCADE', onUpdate: 'RESTRICT'}}],
expectation: {id: 'INTEGER NOT NULL DEFAULT 1 REFERENCES `Bar` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT'}
},
],
createTableQuery: [
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255));"
},
{
arguments: ['myTable', {title: 'ENUM("A", "B", "C")', name: 'VARCHAR(255)'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` ENUM(\"A\", \"B\", \"C\"), `name` VARCHAR(255));"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', id: 'INTEGER PRIMARY KEY'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `id` INTEGER PRIMARY KEY);"
},
{
arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', otherId: 'INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION'}],
expectation: "CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `otherId` INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION);"
}
],
insertQuery: [
{
arguments: ['myTable', { name: 'foo' }],
......@@ -46,6 +121,43 @@ describe('QueryGenerator', function() {
}
],
bulkInsertQuery: [
{
arguments: ['myTable', [{name: 'foo'}, {name: 'bar'}]],
expectation: "INSERT INTO `myTable` (`name`) VALUES ('foo'),('bar');"
}, {
arguments: ['myTable', [{name: "'bar'"}, {name: 'foo'}]],
expectation: "INSERT INTO `myTable` (`name`) VALUES ('''bar'''),('foo');"
}, {
arguments: ['myTable', [{name: "bar", value: null}, {name: 'foo', value: 1}]],
expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL),('foo',1);"
}, {
arguments: ['myTable', [{name: "bar", value: undefined}, {name: 'bar', value: 2}]],
expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL),('bar',2);"
}, {
arguments: ['myTable', [{name: "foo", value: true}, {name: 'bar', value: false}]],
expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',1),('bar',0);"
}, {
arguments: ['myTable', [{name: "foo", value: false}, {name: 'bar', value: false}]],
expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',0),('bar',0);"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);"
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: false}}
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: true}} // Note: We don't honour this because it makes little sense when some rows may have nulls and others not
}, {
arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]],
expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);",
context: {options: {omitNull: true}} // Note: As above
}
],
updateQuery: [
{
arguments: ['myTable', { name: 'foo' }, { id: 2 }],
......@@ -77,6 +189,25 @@ describe('QueryGenerator', function() {
expectation: "UPDATE `myTable` SET `bar`=2 WHERE `name`='foo'",
context: {options: {omitNull: true}}
}
],
deleteQuery: [
{
arguments: ['myTable', {name: 'foo'}],
expectation: "DELETE FROM `myTable` WHERE `name`='foo'"
}, {
arguments: ['myTable', 1],
expectation: "DELETE FROM `myTable` WHERE `id`=1"
}, {
arguments: ['myTable', 1, {limit: 10}],
expectation: "DELETE FROM `myTable` WHERE `id`=1"
}, {
arguments: ['myTable', {name: "foo';DROP TABLE myTable;"}, {limit: 10}],
expectation: "DELETE FROM `myTable` WHERE `name`='foo\\';DROP TABLE myTable;'"
}, {
arguments: ['myTable', {name: 'foo'}, {limit: null}],
expectation: "DELETE FROM `myTable` WHERE `name`='foo'"
}
]
};
......
......@@ -47,4 +47,136 @@ describe(Helpers.getTestDialectTeaser("BelongsTo"), function() {
})
})
})
describe("Foreign key constraints", function() {
it("are not enabled by default", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
Task.belongsTo(User)
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
task.setUser(user).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
Task.belongsTo(User, {onDelete: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
task.setUser(user).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(0)
done()
})
})
})
})
})
})
})
it("can restrict deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
Task.belongsTo(User, {onDelete: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
task.setUser(user).success(function() {
user.destroy().error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
Task.belongsTo(User, {onUpdate: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
task.setUser(user).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
expect(tasks[0].UserId).toEqual(999)
done()
})
})
})
})
})
})
})
it("can restrict updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
Task.belongsTo(User, {onUpdate: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
task.setUser(user).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
})
})
......@@ -288,7 +288,7 @@ describe(Helpers.getTestDialectTeaser("HasMany"), function() {
var add = this.spy()
this.stub(Sequelize.Utils, 'QueryChainer').returns({ add: add, run: function(){} })
this.stub(Sequelize.Utils, 'QueryChainer').returns({ add: add, runSerially: function(){} })
this.sequelize.sync({ force: true })
expect(add).toHaveBeenCalledThrice()
......@@ -323,4 +323,135 @@ describe(Helpers.getTestDialectTeaser("HasMany"), function() {
})
})
})
describe("Foreign key constraints", function() {
it("are not enabled by default", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasMany(Task)
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTasks([task]).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasMany(Task, {onDelete: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTasks([task]).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(0)
done()
})
})
})
})
})
})
})
it("can restrict deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasMany(Task, {onDelete: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTasks([task]).success(function() {
user.destroy().error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasMany(Task, {onUpdate: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTasks([task]).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
expect(tasks[0].UserId).toEqual(999)
done()
})
})
})
})
})
})
})
it("can restrict updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasMany(Task, {onUpdate: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTasks([task]).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
})
})
......@@ -47,4 +47,136 @@ describe(Helpers.getTestDialectTeaser("HasOne"), function() {
})
})
})
describe("Foreign key constraints", function() {
it("are not enabled by default", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasOne(Task)
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTask(task).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasOne(Task, {onDelete: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTask(task).success(function() {
user.destroy().success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(0)
done()
})
})
})
})
})
})
})
it("can restrict deletes", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasOne(Task, {onDelete: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTask(task).success(function() {
user.destroy().error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
it("can cascade updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasOne(Task, {onUpdate: 'cascade'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTask(task).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.success(function() {
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
expect(tasks[0].UserId).toEqual(999)
done()
})
})
})
})
})
})
})
it("can restrict updates", function(done) {
var Task = this.sequelize.define('Task', { title: Sequelize.STRING })
, User = this.sequelize.define('User', { username: Sequelize.STRING })
User.hasOne(Task, {onUpdate: 'restrict'})
this.sequelize.sync({ force: true }).success(function() {
User.create({ username: 'foo' }).success(function(user) {
Task.create({ title: 'task' }).success(function(task) {
user.setTask(task).success(function() {
// Changing the id of a DAO requires a little dance since
// the `UPDATE` query generated by `save()` uses `id` in the
// `WHERE` clause
var tableName = user.QueryInterface.QueryGenerator.addSchema(user.__factory)
user.QueryInterface.update(user, tableName, {id: 999}, user.id)
.error(function() {
// Should fail due to FK restriction
Task.findAll().success(function(tasks) {
expect(tasks.length).toEqual(1)
done()
})
})
})
})
})
})
})
})
})
if(typeof require === 'function') {
const buster = require("buster")
, CustomEventEmitter = require("../lib/emitters/custom-event-emitter")
, Helpers = require('./buster-helpers')
, dialect = Helpers.getTestDialect()
}
buster.spec.expose()
buster.testRunner.timeout = 1000
var Sequelize = require(__dirname + '/../index')
describe(Helpers.getTestDialectTeaser("Configuration"), function() {
describe('Instantiation with a URL string', function() {
it('should accept username, password, host, port, and database', function() {
var sequelize = new Sequelize('mysql://user:pass@example.com:9821/dbname')
var config = sequelize.config
var options = sequelize.options
expect(options.dialect).toEqual('mysql')
expect(config.database).toEqual('dbname')
expect(config.host).toEqual('example.com')
expect(config.username).toEqual('user')
expect(config.password).toEqual('pass')
expect(config.port).toEqual(9821)
})
it('should work with no authentication options', function() {
var sequelize = new Sequelize('mysql://example.com:9821/dbname')
var config = sequelize.config
expect(config.username).toEqual(undefined)
expect(config.password).toEqual(null)
})
it('should use the default port when no other is specified', function() {
var sequelize = new Sequelize('mysql://example.com/dbname')
var config = sequelize.config
// The default port should be set
expect(config.port).toEqual(3306)
})
})
describe('Instantiation with arguments', function() {
it('should accept two parameters (database, username)', function() {
var sequelize = new Sequelize('dbname', 'root')
var config = sequelize.config
expect(config.database).toEqual('dbname')
expect(config.username).toEqual('root')
})
it('should accept three parameters (database, username, password)', function() {
var sequelize = new Sequelize('dbname', 'root', 'pass')
var config = sequelize.config
expect(config.database).toEqual('dbname')
expect(config.username).toEqual('root')
expect(config.password).toEqual('pass')
})
it('should accept four parameters (database, username, password, options)', function() {
var sequelize = new Sequelize('dbname', 'root', 'pass', { port: 999 })
var config = sequelize.config
expect(config.database).toEqual('dbname')
expect(config.username).toEqual('root')
expect(config.password).toEqual('pass')
expect(config.port).toEqual(999)
})
})
})
......@@ -2,7 +2,7 @@ if (typeof require === 'function') {
const buster = require("buster")
, Helpers = require('./buster-helpers')
, dialect = Helpers.getTestDialect()
, _ = require('underscore')
, _ = require('lodash')
}
buster.spec.expose()
......@@ -19,7 +19,23 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
username: { type: DataTypes.STRING },
touchedAt: { type: DataTypes.DATE, defaultValue: DataTypes.NOW },
aNumber: { type: DataTypes.INTEGER },
bNumber: { type: DataTypes.INTEGER }
bNumber: { type: DataTypes.INTEGER },
validateTest: {
type: DataTypes.INTEGER,
allowNull: true,
validate: {isInt: true}
},
validateCustom: {
type: DataTypes.STRING,
allowNull: true,
validate: {len: {msg: 'Length failed.', args: [1,20]}}
},
dateAllowNullTrue: {
type: DataTypes.DATE,
allowNull: true
}
})
self.HistoryLog = sequelize.define('HistoryLog', {
......@@ -27,10 +43,20 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
aNumber: { type: DataTypes.INTEGER },
aRandomId: { type: DataTypes.INTEGER }
})
self.ParanoidUser = sequelize.define('ParanoidUser', {
username: { type: DataTypes.STRING }
}, {
paranoid: true
})
self.ParanoidUser.hasOne( self.ParanoidUser )
},
onComplete: function() {
self.User.sync({ force: true }).success(function(){
self.HistoryLog.sync({ force: true }).success(done)
self.HistoryLog.sync({ force: true }).success(function(){
self.ParanoidUser.sync({force: true }).success(done)
})
})
}
})
......@@ -320,6 +346,29 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
expect(+user.touchedAt).toBe(5000)
})
})
describe('allowNull date', function() {
it('should be just "null" and not Date with Invalid Date', function(done) {
var self = this;
this.User.build({ username: 'a user'}).save().success(function() {
self.User.find({where: {username: 'a user'}}).success(function(user) {
expect(user.dateAllowNullTrue).toBe(null)
done()
})
})
})
it('should be the same valid date when saving the date', function(done) {
var self = this;
var date = new Date();
this.User.build({ username: 'a user', dateAllowNullTrue: date}).save().success(function() {
self.User.find({where: {username: 'a user'}}).success(function(user) {
expect(user.dateAllowNullTrue.toString()).toEqual(date.toString())
done()
})
})
})
})
})
describe('complete', function() {
......@@ -341,6 +390,44 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
})
describe('save', function() {
it('should fail a validation upon creating', function(done){
this.User.create({aNumber: 0, validateTest: 'hello'}).error(function(err){
expect(err).toBeDefined()
expect(err).toBeObject()
expect(err.validateTest).toBeArray()
expect(err.validateTest[0]).toBeDefined()
expect(err.validateTest[0].indexOf('Invalid integer')).toBeGreaterThan(-1);
done();
});
})
it('should fail a validation upon building', function(done){
this.User.build({aNumber: 0, validateCustom: 'aaaaaaaaaaaaaaaaaaaaaaaaaa'}).save()
.error(function(err){
expect(err).toBeDefined()
expect(err).toBeObject()
expect(err.validateCustom).toBeDefined()
expect(err.validateCustom).toBeArray()
expect(err.validateCustom[0]).toBeDefined()
expect(err.validateCustom[0]).toEqual('Length failed.')
done()
})
})
it('should fail a validation when updating', function(done){
this.User.create({aNumber: 0}).success(function(user){
user.updateAttributes({validateTest: 'hello'}).error(function(err){
expect(err).toBeDefined()
expect(err).toBeObject()
expect(err.validateTest).toBeDefined()
expect(err.validateTest).toBeArray()
expect(err.validateTest[0]).toBeDefined()
expect(err.validateTest[0].indexOf('Invalid integer:')).toBeGreaterThan(-1)
done()
})
})
})
it('takes zero into account', function(done) {
this.User.build({ aNumber: 0 }).save([ 'aNumber' ]).success(function(user) {
expect(user.aNumber).toEqual(0)
......@@ -467,6 +554,51 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
}.bind(this))
})
it("creates the deletedAt property, when defining paranoid as true", function(done) {
this.ParanoidUser.create({ username: 'fnord' }).success(function() {
this.ParanoidUser.findAll().success(function(users) {
expect(users[0].deletedAt).toBeDefined()
expect(users[0].deletedAt).toBe(null)
done()
}.bind(this))
}.bind(this))
})
it("sets deletedAt property to a specific date when deleting an instance", function(done) {
this.ParanoidUser.create({ username: 'fnord' }).success(function() {
this.ParanoidUser.findAll().success(function(users) {
users[0].destroy().success(function(user) {
expect(user.deletedAt.getMonth).toBeDefined()
done()
}.bind(this))
}.bind(this))
}.bind(this))
})
it("keeps the deletedAt-attribute with value null, when running updateAttributes", function(done) {
this.ParanoidUser.create({ username: 'fnord' }).success(function() {
this.ParanoidUser.findAll().success(function(users) {
users[0].updateAttributes({username: 'newFnord'}).success(function(user) {
expect(user.deletedAt).toBe(null)
done()
}.bind(this))
}.bind(this))
}.bind(this))
})
it("keeps the deletedAt-attribute with value null, when updating associations", function(done) {
this.ParanoidUser.create({ username: 'fnord' }).success(function() {
this.ParanoidUser.findAll().success(function(users) {
this.ParanoidUser.create({ username: 'linkedFnord' }).success(function( linkedUser ) {
users[0].setParanoidUser( linkedUser ).success(function(user) {
expect(user.deletedAt).toBe(null)
done()
}.bind(this))
}.bind(this))
}.bind(this))
}.bind(this))
})
it("can reuse query option objects", function(done) {
this.User.create({ username: 'fnord' }).success(function() {
var query = { where: { username: 'fnord' }}
......@@ -512,4 +644,44 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
}.bind(this))
})
})
describe('updateAttributes', function() {
it('stores and restores null values', function(done) {
var Download = this.sequelize.define('download', {
startedAt: Helpers.Sequelize.DATE,
canceledAt: Helpers.Sequelize.DATE,
finishedAt: Helpers.Sequelize.DATE
})
Download.sync({ force: true }).success(function() {
Download.create({
startedAt: new Date()
}).success(function(download) {
expect(download.startedAt instanceof Date).toBeTrue()
expect(download.canceledAt).toBeFalsy()
expect(download.finishedAt).toBeFalsy()
download.updateAttributes({
canceledAt: new Date()
}).success(function(download) {
expect(download.startedAt instanceof Date).toBeTrue()
expect(download.canceledAt instanceof Date).toBeTrue()
expect(download.finishedAt).toBeFalsy()
Download.all({
where: (dialect === 'postgres' ? '"finishedAt" IS NULL' : "`finishedAt` IS NULL")
}).success(function(downloads) {
downloads.forEach(function(download) {
expect(download.startedAt instanceof Date).toBeTrue()
expect(download.canceledAt instanceof Date).toBeTrue()
expect(download.finishedAt).toBeFalsy()
})
done()
})
})
})
})
})
})
})
......@@ -42,6 +42,10 @@ describe(Helpers.getTestDialectTeaser("DAO"), function() {
fail: "abc",
pass: "129.89.23.1"
}
, isIPv6 : {
fail: '1111:2222:3333::5555:',
pass: 'fe80:0000:0000:0000:0204:61ff:fe9d:f156'
}
, isAlpha : {
fail: "012",
pass: "abc"
......
......@@ -7,7 +7,7 @@ if(typeof require === 'function') {
buster.spec.expose()
describe(Helpers.getTestDialectTeaser('Data types'), function() {
describe(Helpers.getTestDialectTeaser('DataTypes'), function() {
it('should return DECIMAL for the default decimal type', function() {
expect(Sequelize.DECIMAL).toEqual('DECIMAL');
});
......@@ -15,4 +15,52 @@ describe(Helpers.getTestDialectTeaser('Data types'), function() {
it('should return DECIMAL(10,2) for the default decimal type with arguments', function() {
expect(Sequelize.DECIMAL(10, 2)).toEqual('DECIMAL(10,2)');
});
});
var tests = [
[Sequelize.STRING, 'STRING', 'VARCHAR(255)'],
[Sequelize.STRING(1234), 'STRING(1234)', 'VARCHAR(1234)'],
[Sequelize.STRING(1234).BINARY, 'STRING(1234).BINARY', 'VARCHAR(1234) BINARY'],
[Sequelize.STRING.BINARY, 'STRING.BINARY', 'VARCHAR(255) BINARY'],
[Sequelize.TEXT, 'TEXT', 'TEXT'],
[Sequelize.DATE, 'DATE', 'DATETIME'],
[Sequelize.NOW, 'NOW', 'NOW'],
[Sequelize.BOOLEAN, 'BOOLEAN', 'TINYINT(1)'],
[Sequelize.INTEGER, 'INTEGER', 'INTEGER'],
[Sequelize.INTEGER.UNSIGNED, 'INTEGER.UNSIGNED', 'INTEGER UNSIGNED'],
[Sequelize.INTEGER(11), 'INTEGER(11)','INTEGER(11)'],
[Sequelize.INTEGER(11).UNSIGNED, 'INTEGER(11).UNSIGNED', 'INTEGER(11) UNSIGNED'],
[Sequelize.INTEGER(11).UNSIGNED.ZEROFILL,'INTEGER(11).UNSIGNED.ZEROFILL','INTEGER(11) UNSIGNED ZEROFILL'],
[Sequelize.INTEGER(11).ZEROFILL,'INTEGER(11).ZEROFILL', 'INTEGER(11) ZEROFILL'],
[Sequelize.INTEGER(11).ZEROFILL.UNSIGNED,'INTEGER(11).ZEROFILL.UNSIGNED', 'INTEGER(11) UNSIGNED ZEROFILL'],
[Sequelize.BIGINT, 'BIGINT', 'BIGINT'],
[Sequelize.BIGINT.UNSIGNED, 'BIGINT.UNSIGNED', 'BIGINT UNSIGNED'],
[Sequelize.BIGINT(11), 'BIGINT(11)','BIGINT(11)'],
[Sequelize.BIGINT(11).UNSIGNED, 'BIGINT(11).UNSIGNED', 'BIGINT(11) UNSIGNED'],
[Sequelize.BIGINT(11).UNSIGNED.ZEROFILL, 'BIGINT(11).UNSIGNED.ZEROFILL','BIGINT(11) UNSIGNED ZEROFILL'],
[Sequelize.BIGINT(11).ZEROFILL, 'BIGINT(11).ZEROFILL', 'BIGINT(11) ZEROFILL'],
[Sequelize.BIGINT(11).ZEROFILL.UNSIGNED, 'BIGINT(11).ZEROFILL.UNSIGNED', 'BIGINT(11) UNSIGNED ZEROFILL'],
[Sequelize.FLOAT, 'FLOAT', 'FLOAT'],
[Sequelize.FLOAT.UNSIGNED, 'FLOAT.UNSIGNED', 'FLOAT UNSIGNED'],
[Sequelize.FLOAT(11), 'FLOAT(11)','FLOAT(11)'],
[Sequelize.FLOAT(11).UNSIGNED, 'FLOAT(11).UNSIGNED', 'FLOAT(11) UNSIGNED'],
[Sequelize.FLOAT(11).UNSIGNED.ZEROFILL,'FLOAT(11).UNSIGNED.ZEROFILL','FLOAT(11) UNSIGNED ZEROFILL'],
[Sequelize.FLOAT(11).ZEROFILL,'FLOAT(11).ZEROFILL', 'FLOAT(11) ZEROFILL'],
[Sequelize.FLOAT(11).ZEROFILL.UNSIGNED,'FLOAT(11).ZEROFILL.UNSIGNED', 'FLOAT(11) UNSIGNED ZEROFILL'],
[Sequelize.FLOAT(11, 12), 'FLOAT(11,12)','FLOAT(11,12)'],
[Sequelize.FLOAT(11, 12).UNSIGNED, 'FLOAT(11,12).UNSIGNED', 'FLOAT(11,12) UNSIGNED'],
[Sequelize.FLOAT(11, 12).UNSIGNED.ZEROFILL,'FLOAT(11,12).UNSIGNED.ZEROFILL','FLOAT(11,12) UNSIGNED ZEROFILL'],
[Sequelize.FLOAT(11, 12).ZEROFILL,'FLOAT(11,12).ZEROFILL', 'FLOAT(11,12) ZEROFILL'],
[Sequelize.FLOAT(11, 12).ZEROFILL.UNSIGNED,'FLOAT(11,12).ZEROFILL.UNSIGNED', 'FLOAT(11,12) UNSIGNED ZEROFILL']
]
tests.forEach(function(test) {
it('transforms "' + test[1] + '" to "' + test[2] + '"', function() {
expect(test[0]).toEqual(test[2])
})
})
})
if(typeof require === 'function') {
const buster = require("buster")
, CustomEventEmitter = require("../../lib/emitters/custom-event-emitter")
, Helpers = require('../buster-helpers')
, dialect = Helpers.getTestDialect()
}
buster.spec.expose()
buster.testRunner.timeout = 1000
describe(Helpers.getTestDialectTeaser("CustomEventEmitter"), function() {
describe("proxy", function () {
/* Tests could _probably_ be run synchronously, but for future-proofing we're basing them on the events */
it("should correctly work with success listeners", function (done) {
var emitter = new CustomEventEmitter()
, proxy = new CustomEventEmitter()
, success = this.spy()
emitter.success(success)
proxy.success(function () {
process.nextTick(function () {
expect(success.called).toEqual(true)
done();
})
})
proxy.proxy(emitter)
proxy.emit('success')
})
it("should correctly work with error listeners", function (done) {
var emitter = new CustomEventEmitter()
, proxy = new CustomEventEmitter()
, error = this.spy()
emitter.error(error)
proxy.error(function () {
process.nextTick(function () {
expect(error.called).toEqual(true)
done();
})
})
proxy.proxy(emitter)
proxy.emit('error')
})
it("should correctly work with complete/done listeners", function (done) {
var emitter = new CustomEventEmitter()
, proxy = new CustomEventEmitter()
, complete = this.spy()
emitter.complete(complete)
proxy.complete(function () {
process.nextTick(function () {
expect(complete.called).toEqual(true)
done();
})
})
proxy.proxy(emitter)
proxy.emit('success')
})
});
});
\ No newline at end of file
......@@ -14,8 +14,8 @@ describe(Helpers.getTestDialectTeaser("Migrator"), function() {
before(function(done) {
this.init = function(options, callback) {
options = Helpers.Sequelize.Utils._.extend({
path: __dirname + '/assets/migrations',
logging: false
path: __dirname + '/assets/migrations',
logging: function(){}
}, options || {})
var migrator = new Migrator(this.sequelize, options)
......
......@@ -18,7 +18,8 @@ if (dialect.match(/^postgres/)) {
self.User = sequelize.define('User', {
username: DataTypes.STRING,
email: {type: DataTypes.ARRAY(DataTypes.TEXT)}
email: {type: DataTypes.ARRAY(DataTypes.TEXT)},
document: {type: DataTypes.HSTORE, defaultValue: 'default=>value'}
})
},
onComplete: function() {
......@@ -34,13 +35,34 @@ if (dialect.match(/^postgres/)) {
this.User
.create({ username: 'user', email: ['foo@bar.com', 'bar@baz.com'] })
.success(function(oldUser) {
expect(oldUser.email).toEqual(['foo@bar.com', 'bar@baz.com']);
done();
expect(oldUser.email).toEqual(['foo@bar.com', 'bar@baz.com'])
done()
})
.error(function(err) {
console.log(err)
})
})
it("should handle hstore correctly", function(done) {
var self = this
this.User
.create({ username: 'user', email: ['foo@bar.com'], document: {hello: 'world'}})
.success(function(newUser) {
expect(newUser.document).toEqual({hello: 'world'})
// Check to see if updating an hstore field works
newUser.updateAttributes({document: {should: 'update', to: 'this', first: 'place'}}).success(function(oldUser){
// Postgres always returns keys in alphabetical order (ascending)
expect(oldUser.document).toEqual({first: 'place', should: 'update', to: 'this'})
// Check to see if the default value for an hstore field works
self.User.create({ username: 'user2', email: ['bar@baz.com']}).success(function(defaultUser){
expect(defaultUser.document).toEqual({default: 'value'})
done()
})
})
})
.error(console.log)
})
})
})
}