(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= start) { // Calculate the midway point (divide and conquer) midwayIndex = Math.floor((start + end) / 2); if (lastMidwayIndex === midwayIndex) { // No more items to scan break; } // Get the item to compare against lookupItem = arr[midwayIndex]; if (lookupItem !== undefined) { // Compare items result = fn(this, obj, item, lookupItem); if (result > 0) { start = midwayIndex + 1; } if (result < 0) { end = midwayIndex - 1; } } lastMidwayIndex = midwayIndex; } if (result > 0) { return midwayIndex + 1; } else { return midwayIndex; } }; /** * Calculates the sort position of an item against another item. * @param {Object} sorter An object or instance that contains * sortAsc and sortDesc methods. * @param {Object} obj The document to compare. * @param {String} a The first key to compare. * @param {String} b The second key to compare. * @returns {Number} Either 1 for sort a after b or -1 to sort * a before b. * @private */ ActiveBucket.prototype._sortFunc = function (sorter, obj, a, b) { var aVals = a.split('.:.'), bVals = b.split('.:.'), arr = sorter._keyArr, count = arr.length, index, sortType, castType; for (index = 0; index < count; index++) { sortType = arr[index]; castType = typeof sharedPathSolver.get(obj, sortType.path); if (castType === 'number') { aVals[index] = Number(aVals[index]); bVals[index] = Number(bVals[index]); } // Check for non-equal items if (aVals[index] !== bVals[index]) { // Return the sorted items if (sortType.value === 1) { return sorter.sortAsc(aVals[index], bVals[index]); } if (sortType.value === -1) { return sorter.sortDesc(aVals[index], bVals[index]); } } } }; /** * Inserts a document into the active bucket. * @param {Object} obj The document to insert. * @returns {Number} The index the document now occupies. */ ActiveBucket.prototype.insert = function (obj) { var key, keyIndex; key = this.documentKey(obj); keyIndex = this._data.indexOf(key); if (keyIndex === -1) { // Insert key keyIndex = this.qs(obj, this._data, key, this._sortFunc); this._data.splice(keyIndex, 0, key); } else { this._data.splice(keyIndex, 0, key); } this._objLookup[obj[this._primaryKey]] = key; this._count++; return keyIndex; }; /** * Removes a document from the active bucket. * @param {Object} obj The document to remove. * @returns {Number} The index of the document if the document * was removed successfully or -1 if it wasn't found in the * active bucket. */ ActiveBucket.prototype.remove = function (obj) { var key, keyIndex; key = this._objLookup[obj[this._primaryKey]]; if (key) { keyIndex = this._data.indexOf(key); if (keyIndex > -1) { this._data.splice(keyIndex, 1); delete this._objLookup[obj[this._primaryKey]]; this._count--; return keyIndex; } else { return keyIndex; } } return -1; }; /** * Get the index that the passed document currently occupies * or the index it will occupy if added to the active bucket. * @param {Object} obj The document to get the index for. * @returns {Number} The index. 
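* @example
* // Illustrative sketch only: assumes "bucket" is an ActiveBucket constructed
* // elsewhere around a sort definition such as {name: 1} with a primary key of
* // "_id" (both names are assumptions for illustration).
* bucket.insert({_id: '1', name: 'Alpha'});
* bucket.insert({_id: '2', name: 'Charlie'});
* // Ask where a document would sit in the sorted order without inserting it
* var position = bucket.index({_id: '3', name: 'Bravo'}); // falls between the two documents above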
*/ ActiveBucket.prototype.index = function (obj) { var key, keyIndex; key = this.documentKey(obj); keyIndex = this._data.indexOf(key); if (keyIndex === -1) { // Get key index keyIndex = this.qs(obj, this._data, key, this._sortFunc); } return keyIndex; }; /** * The key that represents the passed document. * @param {Object} obj The document to get the key for. * @returns {String} The document key. */ ActiveBucket.prototype.documentKey = function (obj) { var key = '', arr = this._keyArr, count = arr.length, val, index, sortType; for (index = 0; index < count; index++) { sortType = arr[index]; if (key) { key += '.:.'; } val = sharedPathSolver.get(obj, sortType.path); if (val !== undefined) { // The value of the sort path in the object is not undefined key += val; } else { // The value of the sort path in the object is undefined. This // will cause sorting issues. undefined is therefore recorded // as string zero which will sort to low value. key += "0"; } } // Add the unique identifier on the end of the key key += '.:.' + obj[this._primaryKey]; return key; }; /** * Get the number of documents currently indexed in the active * bucket instance. * @returns {Number} The number of documents. */ ActiveBucket.prototype.count = function () { return this._count; }; Shared.finishModule('ActiveBucket'); module.exports = ActiveBucket; },{"./Path":34,"./Shared":40}],4:[function(_dereq_,module,exports){ "use strict"; var Shared, ReactorIO, Collection, CollectionInit, Db, DbInit, BinaryLog; Shared = _dereq_('./Shared'); BinaryLog = function () { this.init.apply(this, arguments); }; BinaryLog.prototype.init = function (parent) { var self = this; self._logCounter = 0; self._parent = parent; self.size(1000); }; Shared.addModule('BinaryLog', BinaryLog); Shared.mixin(BinaryLog.prototype, 'Mixin.Common'); Shared.mixin(BinaryLog.prototype, 'Mixin.ChainReactor'); Shared.mixin(BinaryLog.prototype, 'Mixin.Events'); Collection = Shared.modules.Collection; Db = Shared.modules.Db; ReactorIO = Shared.modules.ReactorIO; CollectionInit = Collection.prototype.init; DbInit = Db.prototype.init; Shared.synthesize(BinaryLog.prototype, 'name'); Shared.synthesize(BinaryLog.prototype, 'size'); BinaryLog.prototype.attachIO = function () { var self = this; if (!self._io) { self._log = new Collection(self._parent.name() + '-BinaryLog', {capped: true, size: self.size()}); // Override the log collection's id generator so it is linear self._log.objectId = function (id) { if (!id) { id = ++self._logCounter; } return id; }; self._io = new ReactorIO(self._parent, self, function (chainPacket) { self._log.insert({ type: chainPacket.type, data: chainPacket.data }); // Returning false informs the chain reactor to continue propagation // of the chain packet down the graph tree return false; }); } }; BinaryLog.prototype.detachIO = function () { var self = this; if (self._io) { self._log.drop(); self._io.drop(); delete self._log; delete self._io; } }; Collection.prototype.init = function () { CollectionInit.apply(this, arguments); this._binaryLog = new BinaryLog(this); }; Shared.finishModule('BinaryLog'); module.exports = BinaryLog; },{"./Shared":40}],5:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), Path = _dereq_('./Path'), sharedPathSolver = new Path(); var BinaryTree = function (data, compareFunc, hashFunc) { this.init.apply(this, arguments); }; BinaryTree.prototype.init = function (data, index, primaryKey, compareFunc, hashFunc) { this._store = []; this._keys = []; if (primaryKey !== undefined) { 
this.primaryKey(primaryKey); } if (index !== undefined) { this.index(index); } if (compareFunc !== undefined) { this.compareFunc(compareFunc); } if (hashFunc !== undefined) { this.hashFunc(hashFunc); } if (data !== undefined) { this.data(data); } }; Shared.addModule('BinaryTree', BinaryTree); Shared.mixin(BinaryTree.prototype, 'Mixin.ChainReactor'); Shared.mixin(BinaryTree.prototype, 'Mixin.Sorting'); Shared.mixin(BinaryTree.prototype, 'Mixin.Common'); Shared.synthesize(BinaryTree.prototype, 'compareFunc'); Shared.synthesize(BinaryTree.prototype, 'hashFunc'); Shared.synthesize(BinaryTree.prototype, 'indexDir'); Shared.synthesize(BinaryTree.prototype, 'primaryKey'); Shared.synthesize(BinaryTree.prototype, 'keys'); Shared.synthesize(BinaryTree.prototype, 'index', function (index) { if (index !== undefined) { if (this.debug()) { console.log('Setting index', index, sharedPathSolver.parse(index, true)); } // Convert the index object to an array of key val objects this.keys(sharedPathSolver.parse(index, true)); } return this.$super.call(this, index); }); /** * Remove all data from the binary tree. */ BinaryTree.prototype.clear = function () { delete this._data; delete this._left; delete this._right; this._store = []; }; /** * Sets this node's data object. All further inserted documents that * match this node's key and value will be pushed via the push() * method into the this._store array. When deciding whether new data * should be placed to the left, to the right or in the middle (pushed) of * this node, the new data is compared against the data set via this method. * @param val * @returns {*} */ BinaryTree.prototype.data = function (val) { if (val !== undefined) { this._data = val; if (this._hashFunc) { this._hash = this._hashFunc(val); } return this; } return this._data; }; /** * Pushes an item to the binary tree node's store array. * @param {*} val The item to add to the store. * @returns {*} */ BinaryTree.prototype.push = function (val) { if (val !== undefined) { this._store.push(val); return this; } return false; }; /** * Pulls an item from the binary tree node's store array. * @param {*} val The item to remove from the store. * @returns {*} */ BinaryTree.prototype.pull = function (val) { if (val !== undefined) { var index = this._store.indexOf(val); if (index > -1) { this._store.splice(index, 1); return this; } } return false; }; /** * Default compare method. Can be overridden. * @param a * @param b * @returns {Number} * @private */ BinaryTree.prototype._compareFunc = function (a, b) { // Loop the index array var i, indexData, result = 0; for (i = 0; i < this._keys.length; i++) { indexData = this._keys[i]; if (indexData.value === 1) { result = this.sortAscIgnoreUndefined(sharedPathSolver.get(a, indexData.path), sharedPathSolver.get(b, indexData.path)); } else if (indexData.value === -1) { result = this.sortDescIgnoreUndefined(sharedPathSolver.get(a, indexData.path), sharedPathSolver.get(b, indexData.path)); } if (this.debug()) { console.log('Compared %s with %s order %d in path %s and result was %d', sharedPathSolver.get(a, indexData.path), sharedPathSolver.get(b, indexData.path), indexData.value, indexData.path, result); } if (result !== 0) { if (this.debug()) { console.log('Returning result %d', result); } return result; } } if (this.debug()) { console.log('Returning result %d', result); } return result; }; /** * Default hash function. Can be overridden. 
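* @example
* // Sketch of overriding the default hash via the synthesized hashFunc()
* // accessor declared earlier in this module ("tree" and the "name" field are
* // assumptions for illustration):
* tree.hashFunc(function (obj) {
* 	return obj.name;
* });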
* @param obj * @private */ BinaryTree.prototype._hashFunc = function (obj) { /*var i, indexData, hash = ''; for (i = 0; i < this._keys.length; i++) { indexData = this._keys[i]; if (hash) { hash += '_'; } hash += obj[indexData.path]; } return hash;*/ return obj[this._keys[0].path]; }; /** * Removes (deletes reference to) either left or right child if the passed * node matches one of them. * @param {BinaryTree} node The node to remove. */ BinaryTree.prototype.removeChildNode = function (node) { if (this._left === node) { // Remove left delete this._left; } else if (this._right === node) { // Remove right delete this._right; } }; /** * Returns the branch this node matches (left or right). * @param node * @returns {String} */ BinaryTree.prototype.nodeBranch = function (node) { if (this._left === node) { return 'left'; } else if (this._right === node) { return 'right'; } }; /** * Inserts a document into the binary tree. * @param data * @returns {*} */ BinaryTree.prototype.insert = function (data) { var result, inserted, failed, i; if (data instanceof Array) { // Insert array of data inserted = []; failed = []; for (i = 0; i < data.length; i++) { if (this.insert(data[i])) { inserted.push(data[i]); } else { failed.push(data[i]); } } return { inserted: inserted, failed: failed }; } if (this.debug()) { console.log('Inserting', data); } if (!this._data) { if (this.debug()) { console.log('Node has no data, setting data', data); } // Insert into this node (overwrite) as there is no data this.data(data); //this.push(data); return true; } result = this._compareFunc(this._data, data); if (result === 0) { if (this.debug()) { console.log('Data is equal (current, new)', this._data, data); } //this.push(data); // Equal values are stored down the left branch if (this._left) { // Propagate down the left branch this._left.insert(data); } else { // Assign to left branch this._left = new BinaryTree(data, this._index, this._binaryTree, this._compareFunc, this._hashFunc); this._left._parent = this; } return true; } if (result === -1) { if (this.debug()) { console.log('Data is greater (current, new)', this._data, data); } // Greater than this node if (this._right) { // Propagate down the right branch this._right.insert(data); } else { // Assign to right branch this._right = new BinaryTree(data, this._index, this._binaryTree, this._compareFunc, this._hashFunc); this._right._parent = this; } return true; } if (result === 1) { if (this.debug()) { console.log('Data is less (current, new)', this._data, data); } // Less than this node if (this._left) { // Propagate down the left branch this._left.insert(data); } else { // Assign to left branch this._left = new BinaryTree(data, this._index, this._binaryTree, this._compareFunc, this._hashFunc); this._left._parent = this; } return true; } return false; }; BinaryTree.prototype.remove = function (data) { var pk = this.primaryKey(), result, removed, i; if (data instanceof Array) { // Remove an array of data removed = []; for (i = 0; i < data.length; i++) { if (this.remove(data[i])) { removed.push(data[i]); } } return removed; } if (this.debug()) { console.log('Removing', data); } if (this._data[pk] === data[pk]) { // Remove this node return this._remove(this); } // Compare the data to work out which branch to send the remove command down result = this._compareFunc(this._data, data); if (result === -1 && this._right) { return this._right.remove(data); } if (result === 1 && this._left) { return this._left.remove(data); } return false; }; BinaryTree.prototype._remove = function (node) { var leftNode, 
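	// (rightNode is declared on the next line.) Removal works by promotion:
	// if a left child exists, its data, store and children replace this node's,
	// and any existing right subtree is re-attached to the right-most node of
	// the promoted left subtree; otherwise the right child (if any) is promoted,
	// and a node with no children simply clears itself.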
rightNode; if (this._left) { // Backup branch data leftNode = this._left; rightNode = this._right; // Copy data from left node this._left = leftNode._left; this._right = leftNode._right; this._data = leftNode._data; this._store = leftNode._store; if (rightNode) { // Attach the rightNode data to the right-most node // of the leftNode leftNode.rightMost()._right = rightNode; } } else if (this._right) { // Backup branch data rightNode = this._right; // Copy data from right node this._left = rightNode._left; this._right = rightNode._right; this._data = rightNode._data; this._store = rightNode._store; } else { this.clear(); } return true; }; BinaryTree.prototype.leftMost = function () { if (!this._left) { return this; } else { return this._left.leftMost(); } }; BinaryTree.prototype.rightMost = function () { if (!this._right) { return this; } else { return this._right.rightMost(); } }; /** * Searches the binary tree for all matching documents based on the data * passed (query). * @param {Object} data The data / document to use for lookups. * @param {Object} options An options object. * @param {Operation} op An optional operation instance. Pass undefined * if not being used. * @param {Array=} resultArr The results passed between recursive calls. * Do not pass anything into this argument when calling externally. * @returns {*|Array} */ BinaryTree.prototype.lookup = function (data, options, op, resultArr) { var result = this._compareFunc(this._data, data); resultArr = resultArr || []; if (result === 0) { if (this._left) { this._left.lookup(data, options, op, resultArr); } resultArr.push(this._data); if (this._right) { this._right.lookup(data, options, op, resultArr); } } if (result === -1) { if (this._right) { this._right.lookup(data, options, op, resultArr); } } if (result === 1) { if (this._left) { this._left.lookup(data, options, op, resultArr); } } return resultArr; }; /** * Returns the entire binary tree ordered. * @param {String} type * @param resultArr * @returns {*|Array} */ BinaryTree.prototype.inOrder = function (type, resultArr) { resultArr = resultArr || []; if (this._left) { this._left.inOrder(type, resultArr); } switch (type) { case 'hash': resultArr.push(this._hash); break; case 'data': resultArr.push(this._data); break; default: resultArr.push({ key: this._data, arr: this._store }); break; } if (this._right) { this._right.inOrder(type, resultArr); } return resultArr; }; /** * Searches the binary tree for all matching documents based on the regular * expression passed. * @param path * @param val * @param regex * @param {Array=} resultArr The results passed between recursive calls. * Do not pass anything into this argument when calling externally. 
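* @example
* // Sketch only: assumes "tree" is a BinaryTree indexed on a string path such
* // as {name: 1} and already populated via insert().
* var matches = tree.startsWith('name', 'Jo'); // documents whose name begins with "Jo"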
* @returns {*|Array} */ BinaryTree.prototype.startsWith = function (path, val, regex, resultArr) { var reTest, thisDataPathVal = sharedPathSolver.get(this._data, path), thisDataPathValSubStr = thisDataPathVal.substr(0, val.length), result; //regex = regex || new RegExp('^' + val); resultArr = resultArr || []; if (resultArr._visitedCount === undefined) { resultArr._visitedCount = 0; } resultArr._visitedCount++; resultArr._visitedNodes = resultArr._visitedNodes || []; resultArr._visitedNodes.push(thisDataPathVal); result = this.sortAscIgnoreUndefined(thisDataPathValSubStr, val); reTest = thisDataPathValSubStr === val; if (result === 0) { if (this._left) { this._left.startsWith(path, val, regex, resultArr); } if (reTest) { resultArr.push(this._data); } if (this._right) { this._right.startsWith(path, val, regex, resultArr); } } if (result === -1) { if (reTest) { resultArr.push(this._data); } if (this._right) { this._right.startsWith(path, val, regex, resultArr); } } if (result === 1) { if (this._left) { this._left.startsWith(path, val, regex, resultArr); } if (reTest) { resultArr.push(this._data); } } return resultArr; }; /*BinaryTree.prototype.find = function (type, search, resultArr) { resultArr = resultArr || []; if (this._left) { this._left.find(type, search, resultArr); } // Check if this node's data is greater or less than the from value var fromResult = this.sortAsc(this._data[key], from), toResult = this.sortAsc(this._data[key], to); if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) { // This data node is greater than or equal to the from value, // and less than or equal to the to value so include it switch (type) { case 'hash': resultArr.push(this._hash); break; case 'data': resultArr.push(this._data); break; default: resultArr.push({ key: this._data, arr: this._store }); break; } } if (this._right) { this._right.find(type, search, resultArr); } return resultArr; };*/ /** * * @param {String} type * @param {String} key The data key / path to range search against. * @param {Number} from Range search from this value (inclusive) * @param {Number} to Range search to this value (inclusive) * @param {Array=} resultArr Leave undefined when calling (internal use), * passes the result array between recursive calls to be returned when * the recursion chain completes. * @param {Path=} pathResolver Leave undefined when calling (internal use), * caches the path resolver instance for performance. 
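* @example
* // Sketch only: assumes "tree" indexes documents that carry a numeric "price"
* // path. Returns the matching documents ('data') priced 10 to 20 inclusive.
* var docs = tree.findRange('data', 'price', 10, 20);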
* @returns {Array} Array of matching document objects */ BinaryTree.prototype.findRange = function (type, key, from, to, resultArr, pathResolver) { resultArr = resultArr || []; pathResolver = pathResolver || new Path(key); if (this._left) { this._left.findRange(type, key, from, to, resultArr, pathResolver); } // Check if this node's data is greater or less than the from value var pathVal = pathResolver.value(this._data), fromResult = this.sortAscIgnoreUndefined(pathVal, from), toResult = this.sortAscIgnoreUndefined(pathVal, to); if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) { // This data node is greater than or equal to the from value, // and less than or equal to the to value so include it switch (type) { case 'hash': resultArr.push(this._hash); break; case 'data': resultArr.push(this._data); break; default: resultArr.push({ key: this._data, arr: this._store }); break; } } if (this._right) { this._right.findRange(type, key, from, to, resultArr, pathResolver); } return resultArr; }; /*BinaryTree.prototype.findRegExp = function (type, key, pattern, resultArr) { resultArr = resultArr || []; if (this._left) { this._left.findRegExp(type, key, pattern, resultArr); } // Check if this node's data is greater or less than the from value var fromResult = this.sortAsc(this._data[key], from), toResult = this.sortAsc(this._data[key], to); if ((fromResult === 0 || fromResult === 1) && (toResult === 0 || toResult === -1)) { // This data node is greater than or equal to the from value, // and less than or equal to the to value so include it switch (type) { case 'hash': resultArr.push(this._hash); break; case 'data': resultArr.push(this._data); break; default: resultArr.push({ key: this._data, arr: this._store }); break; } } if (this._right) { this._right.findRegExp(type, key, pattern, resultArr); } return resultArr; };*/ /** * Determines if the passed query and options object will be served * by this index successfully or not and gives a score so that the * DB search system can determine how useful this index is in comparison * to other indexes on the same collection. * @param query * @param queryOptions * @param matchOptions * @returns {{matchedKeys: Array, totalKeyCount: Number, score: number}} */ BinaryTree.prototype.match = function (query, queryOptions, matchOptions) { // Check if the passed query has data in the keys our index // operates on and if so, is the query sort matching our order var indexKeyArr, queryArr, matchedKeys = [], matchedKeyCount = 0, i; indexKeyArr = sharedPathSolver.parseArr(this._index, { verbose: true }); queryArr = sharedPathSolver.parseArr(query, matchOptions && matchOptions.pathOptions ? matchOptions.pathOptions : { ignore:/\$/, verbose: true }); // Loop the query array and check the order of keys against the // index key array to see if this index can be used for (i = 0; i < indexKeyArr.length; i++) { if (queryArr[i] === indexKeyArr[i]) { matchedKeyCount++; matchedKeys.push(queryArr[i]); } } return { matchedKeys: matchedKeys, totalKeyCount: queryArr.length, score: matchedKeyCount }; //return sharedPathSolver.countObjectPaths(this._keys, query); }; Shared.finishModule('BinaryTree'); module.exports = BinaryTree; },{"./Path":34,"./Shared":40}],6:[function(_dereq_,module,exports){ "use strict"; var Shared, Db, Metrics, KeyValueStore, Path, IndexHashMap, IndexBinaryTree, Index2d, Overload, ReactorIO, Condition, sharedPathSolver; Shared = _dereq_('./Shared'); /** * Creates a new collection. 
Collections store multiple documents and * handle CRUD against those documents. * @constructor * @class */ var Collection = function (name, options) { this.init.apply(this, arguments); }; /** * Creates a new collection. Collections store multiple documents and * handle CRUD against those documents. */ Collection.prototype.init = function (name, options) { // Ensure we have an options object options = options || {}; // Set internals this.sharedPathSolver = sharedPathSolver; this._primaryKey = options.primaryKey || '_id'; this._primaryIndex = new KeyValueStore('primary', {primaryKey: this.primaryKey()}); this._primaryCrc = new KeyValueStore('primaryCrc', {primaryKey: this.primaryKey()}); this._crcLookup = new KeyValueStore('crcLookup', {primaryKey: this.primaryKey()}); this._name = name; this._data = []; this._metrics = new Metrics(); this._options = options || { changeTimestamp: false }; if (this._options.db) { this.db(this._options.db); } // Create an object to store internal protected data this._metaData = {}; this._deferQueue = { insert: [], update: [], remove: [], upsert: [], async: [] }; this._deferThreshold = { insert: 100, update: 100, remove: 100, upsert: 100 }; this._deferTime = { insert: 1, update: 1, remove: 1, upsert: 1 }; this._deferredCalls = true; // Set the subset to itself since it is the root collection this.subsetOf(this); }; Shared.addModule('Collection', Collection); Shared.mixin(Collection.prototype, 'Mixin.Common'); Shared.mixin(Collection.prototype, 'Mixin.Events'); Shared.mixin(Collection.prototype, 'Mixin.ChainReactor'); Shared.mixin(Collection.prototype, 'Mixin.CRUD'); Shared.mixin(Collection.prototype, 'Mixin.Constants'); Shared.mixin(Collection.prototype, 'Mixin.Triggers'); Shared.mixin(Collection.prototype, 'Mixin.Sorting'); Shared.mixin(Collection.prototype, 'Mixin.Matching'); Shared.mixin(Collection.prototype, 'Mixin.Updating'); Shared.mixin(Collection.prototype, 'Mixin.Tags'); Metrics = _dereq_('./Metrics'); KeyValueStore = _dereq_('./KeyValueStore'); Path = _dereq_('./Path'); IndexHashMap = _dereq_('./IndexHashMap'); IndexBinaryTree = _dereq_('./IndexBinaryTree'); Index2d = _dereq_('./Index2d'); Db = Shared.modules.Db; Overload = _dereq_('./Overload'); ReactorIO = _dereq_('./ReactorIO'); Condition = _dereq_('./Condition'); sharedPathSolver = new Path(); /** * Gets / sets the deferred calls flag. If set to true (default) * then operations on large data sets can be broken up and done * over multiple CPU cycles (creating an async state). For purely * synchronous behaviour set this to false. * @param {Boolean=} val The value to set. * @returns {Boolean} */ Shared.synthesize(Collection.prototype, 'deferredCalls'); /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(Collection.prototype, 'state'); /** * Gets / sets the name of the collection. * @param {String=} val The name of the collection to set. * @returns {*} */ Shared.synthesize(Collection.prototype, 'name'); /** * Gets / sets the metadata stored in the collection. * @param {Object=} val The data to set. * @returns {*} */ Shared.synthesize(Collection.prototype, 'metaData'); /** * Gets / sets boolean to determine if the collection should be * capped or not. * @param {Boolean=} val The value to set. * @returns {*} */ Shared.synthesize(Collection.prototype, 'capped'); /** * Gets / sets capped collection size. This is the maximum number * of records that the capped collection will store. * @param {Number=} val The value to set. 
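* @example
* // Sketch only: marking a collection as capped via the synthesized accessors
* // (the collection name is an assumption; the BinaryLog module above passes
* // {capped: true, size: n} to the constructor to achieve the same thing).
* var log = new Collection('activity-log');
* log.capped(true);
* log.cappedSize(1000);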
* @returns {*} */ Shared.synthesize(Collection.prototype, 'cappedSize'); /** * Adds a job id to the async queue to signal to other parts * of the application that some async work is currently being * done. * @param {String} key The id of the async job. * @private */ Collection.prototype._asyncPending = function (key) { this._deferQueue.async.push(key); }; /** * Removes a job id from the async queue to signal to other * parts of the application that some async work has been * completed. If no further async jobs exist on the queue then * the "ready" event is emitted from this collection instance. * @param {String} key The id of the async job. * @private */ Collection.prototype._asyncComplete = function (key) { // Remove async flag for this type var index = this._deferQueue.async.indexOf(key); while (index > -1) { this._deferQueue.async.splice(index, 1); index = this._deferQueue.async.indexOf(key); } if (this._deferQueue.async.length === 0) { this.deferEmit('ready'); } }; /** * Get the data array that represents the collection's data. * This data is returned by reference and should not be altered outside * of the provided CRUD functionality of the collection as doing so * may cause unstable index behaviour within the collection. * @returns {Array} */ Collection.prototype.data = function () { return this._data; }; /** * Drops a collection and all it's stored data from the database. * @param {Function=} callback A callback method to call once the * operation has completed. * @returns {boolean} True on success, false on failure. */ Collection.prototype.drop = function (callback) { var key; if (!this.isDropped()) { if (this._db && this._db._collection && this._name) { if (this.debug()) { console.log(this.logIdentifier() + ' Dropping'); } this._state = 'dropped'; this.emit('drop', this); delete this._db._collection[this._name]; // Remove any reactor IO chain links if (this._collate) { for (key in this._collate) { if (this._collate.hasOwnProperty(key)) { this.collateRemove(key); } } } delete this._primaryKey; delete this._primaryIndex; delete this._primaryCrc; delete this._crcLookup; delete this._data; delete this._metrics; delete this._listeners; if (callback) { callback.call(this, false, true); } return true; } } else { if (callback) { callback.call(this, false, true); } return true; } if (callback) { callback.call(this, false, true); } return false; }; /** * Gets / sets the primary key for this collection. * @param {String=} keyName The name of the primary key. * @returns {*} */ Collection.prototype.primaryKey = function (keyName) { if (keyName !== undefined) { if (this._primaryKey !== keyName) { var oldKey = this._primaryKey; this._primaryKey = keyName; // Set the primary key index primary key this._primaryIndex.primaryKey(keyName); // Rebuild the primary key index this.rebuildPrimaryKeyIndex(); // Propagate change down the chain this.chainSend('primaryKey', { keyName: keyName, oldData: oldKey }); } return this; } return this._primaryKey; }; /** * Handles insert events and routes changes to binds and views as required. * @param {Array} inserted An array of inserted documents. * @param {Array} failed An array of documents that failed to insert. * @private */ Collection.prototype._onInsert = function (inserted, failed) { this.emit('insert', inserted, failed); }; /** * Handles update events and routes changes to binds and views as required. * @param {Array} items An array of updated documents. 
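* @example
* // Sketch only: observing the "update" event emitted by this handler. The
* // on() method comes from the Events mixin applied above; its exact signature
* // is assumed here, as is the "users" collection.
* users.on('update', function (items) {
* 	console.log('Updated', items.length, 'documents');
* });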
* @private */ Collection.prototype._onUpdate = function (items) { this.emit('update', items); }; /** * Handles remove events and routes changes to binds and views as required. * @param {Array} items An array of removed documents. * @private */ Collection.prototype._onRemove = function (items) { this.emit('remove', items); }; /** * Handles any change to the collection by updating the * lastChange timestamp on the collection's metaData. This * only happens if the changeTimestamp option is enabled * on the collection (it is disabled by default). * @private */ Collection.prototype._onChange = function () { if (this._options.changeTimestamp) { // Record the last change timestamp this._metaData.lastChange = this.serialiser.convert(new Date()); } }; /** * Gets / sets the db instance this class instance belongs to. * @param {Db=} db The db instance. * @returns {*} */ Shared.synthesize(Collection.prototype, 'db', function (db) { if (db) { if (this.primaryKey() === '_id') { // Set primary key to the db's key by default this.primaryKey(db.primaryKey()); // Apply the same debug settings this.debug(db.debug()); } } return this.$super.apply(this, arguments); }); /** * Gets / sets mongodb emulation mode. * @param {Boolean=} val True to enable, false to disable. * @returns {*} */ Shared.synthesize(Collection.prototype, 'mongoEmulation'); Collection.prototype.setData = new Overload('Collection.prototype.setData', { /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. */ '*': function (data) { return this.$main.call(this, data, {}); }, /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. * @param {Object} options Optional options object. */ '*, object': function (data, options) { return this.$main.call(this, data, options); }, /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. * @param {Function} callback Optional callback function. */ '*, function': function (data, callback) { return this.$main.call(this, data, {}, callback); }, /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. * @param {*} options Optional options object. * @param {Function} callback Optional callback function. 
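* @example
* // Sketch only ("users" is an assumed collection): replaces all existing
* // documents with the passed array, then fires the callback.
* users.setData([
* 	{_id: '1', name: 'Jo'},
* 	{_id: '2', name: 'Sam'}
* ], {$decouple: true}, function () {
* 	console.log('Data replaced');
* });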
*/ '*, *, function': function (data, options, callback) { return this.$main.call(this, data, options, callback); }, /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. * @param {*} options Optional options object. * @param {*} callback Optional callback function. */ '*, *, *': function (data, options, callback) { return this.$main.call(this, data, options, callback); }, /** * Sets the collection's data to the array / documents passed. If any * data already exists in the collection it will be removed before the * new data is set via the remove() method, and the remove event will * fire as well. * @name setData * @method Collection.setData * @param {Array|Object} data The array of documents or a single document * that will be set as the collections data. * @param {Object} options Optional options object. * @param {Function} callback Optional callback function. */ '$main': function (data, options, callback) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } if (data) { var deferredSetting = this.deferredCalls(), oldData = [].concat(this._data); // Switch off deferred calls since setData should be // a synchronous call this.deferredCalls(false); options = this.options(options); if (options.$decouple) { data = this.decouple(data); } if (!(data instanceof Array)) { data = [data]; } // Remove all items from the collection this.remove({}); // Insert the new data this.insert(data); // Switch deferred calls back to previous settings this.deferredCalls(deferredSetting); this._onChange(); this.emit('setData', this._data, oldData); } if (callback) { callback.call(this); } return this; } }); /** * Drops and rebuilds the primary key index for all documents * in the collection. * @param {Object=} options An optional options object. * @private */ Collection.prototype.rebuildPrimaryKeyIndex = function (options) { options = options || { $ensureKeys: undefined, $violationCheck: undefined }; var ensureKeys = options && options.$ensureKeys !== undefined ? options.$ensureKeys : true, violationCheck = options && options.$violationCheck !== undefined ? options.$violationCheck : true, arr, arrCount, arrItem, pIndex = this._primaryIndex, crcIndex = this._primaryCrc, crcLookup = this._crcLookup, pKey = this._primaryKey, jString; // Drop the existing primary index pIndex.truncate(); crcIndex.truncate(); crcLookup.truncate(); // Loop the data and check for a primary key in each object arr = this._data; arrCount = arr.length; while (arrCount--) { arrItem = arr[arrCount]; if (ensureKeys) { // Make sure the item has a primary key this.ensurePrimaryKey(arrItem); } if (violationCheck) { // Check for primary key violation if (!pIndex.uniqueSet(arrItem[pKey], arrItem)) { // Primary key violation throw(this.logIdentifier() + ' Call to setData on collection failed because your data violates the primary key unique constraint. 
One or more documents are using the same primary key: ' + arrItem[this._primaryKey]); } } else { pIndex.set(arrItem[pKey], arrItem); } // Generate a hash string jString = this.hash(arrItem); crcIndex.set(arrItem[pKey], jString); crcLookup.set(jString, arrItem); } }; /** * Checks for a primary key on the document and assigns one if none * currently exists. * @param {Object} obj The object to check a primary key against. * @private */ Collection.prototype.ensurePrimaryKey = function (obj) { if (obj[this._primaryKey] === undefined) { // Assign a primary key automatically obj[this._primaryKey] = this.objectId(); } }; /** * Clears all data from the collection. * @returns {Collection} */ Collection.prototype.truncate = function () { var i; if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } // TODO: This should use remove so that chain reactor events are properly // TODO: handled, but ensure that chunking is switched off this.emit('truncate', this._data); // Clear all the data from the collection this._data.length = 0; // Re-create the primary index data this._primaryIndex = new KeyValueStore('primary', {primaryKey: this.primaryKey()}); this._primaryCrc = new KeyValueStore('primaryCrc', {primaryKey: this.primaryKey()}); this._crcLookup = new KeyValueStore('crcLookup', {primaryKey: this.primaryKey()}); // Re-create any existing collection indexes // TODO: This might not be the most efficient way to do this, perhaps just re-creating // the indexes would be faster than calling rebuild? for (i in this._indexByName) { if (this._indexByName.hasOwnProperty(i)) { this._indexByName[i].rebuild(); } } this._onChange(); this.emit('immediateChange', {type: 'truncate'}); this.deferEmit('change', {type: 'truncate'}); return this; }; /** * Inserts a new document or updates an existing document in a * collection depending on if a matching primary key exists in * the collection already or not. * * If the document contains a primary key field (based on the * collections's primary key) then the database will search for * an existing document with a matching id. If a matching * document is found, the document will be updated. Any keys that * match keys on the existing document will be overwritten with * new data. Any keys that do not currently exist on the document * will be added to the document. * * If the document does not contain an id or the id passed does * not match an existing document, an insert is performed instead. * If no id is present a new primary key id is provided for the * document and the document is inserted. * * @param {Object} obj The document object to upsert or an array * containing documents to upsert. * @param {Function=} callback Optional callback method. * @returns {Array} An array containing an object for each operation * performed. Each object contains two keys, "op" contains either "none", * "insert" or "update" depending on the type of operation that was * performed and "result" contains the return data from the operation * used. 
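* @example
* // Sketch only ("users" is an assumed collection). If a document with _id "1"
* // already exists this becomes an update, otherwise an insert.
* var result = users.upsert({_id: '1', name: 'Jo', active: true});
* // result is an array such as [{op: 'update', result: ...}]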
*/ Collection.prototype.upsert = function (obj, callback) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } if (obj) { var queue = this._deferQueue.upsert, deferThreshold = this._deferThreshold.upsert, returnData = {}, query, i; // Determine if the object passed is an array or not if (obj instanceof Array) { if (this._deferredCalls && obj.length > deferThreshold) { // Break up upsert into blocks this._deferQueue.upsert = queue.concat(obj); this._asyncPending('upsert'); // Fire off the insert queue handler this.processQueue('upsert', callback); return {}; } else { // Loop the array and upsert each item returnData = []; for (i = 0; i < obj.length; i++) { returnData.push(this.upsert(obj[i])[0]); } if (callback) { callback.call(this, returnData); } return returnData; } } // Determine if the operation is an insert or an update if (obj[this._primaryKey]) { // Check if an object with this primary key already exists query = {}; query[this._primaryKey] = obj[this._primaryKey]; if (this._primaryIndex.lookup(query)[0]) { // The document already exists with this id, this operation is an update returnData.op = 'update'; } else { // No document with this id exists, this operation is an insert returnData.op = 'insert'; } } else { // The document passed does not contain an id, this operation is an insert returnData.op = 'insert'; } switch (returnData.op) { case 'insert': returnData.result = this.insert(obj, callback); break; case 'update': returnData.result = this.update(query, obj, {}, callback); break; default: break; } if (callback) { callback.call(this, [returnData]); } return [returnData]; } else { if (callback) { callback.call(this, {op: 'none'}); } } return {}; }; /** * Executes a method against each document that matches query and returns an * array of documents that may have been modified by the method. * @param {Object=} query The optional query object. * @param {Object=} options Optional options object. If you specify an options object * you MUST also specify a query object. * @param {Function} func The method that each document is passed to. If this method * returns false for a particular document it is excluded from the results. If you * return a modified object from the one passed to it will be included in the results * as the modified version but will not affect the data in the collection at all. * Your function will be called with a single object as the first argument and will * be called once for every document in your initial query result. * @returns {Array} */ Collection.prototype.filter = function (query, options, func) { var temp; if (typeof query === 'function') { func = query; query = {}; options = {}; } if (typeof options === 'function') { if (func) { temp = func; } func = options; options = temp || {}; } return (this.find(query, options)).filter(func); }; /** * Executes a method against each document that matches query and then executes * an update based on the return data of the method. * @param {Object} query The query object. * @param {Function} func The method that each document is passed to. If this method * returns false for a particular document it is excluded from the update. * @param {Object=} options Optional options object passed to the initial find call. 
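* @example
* // Sketch only ("users", "active" and "loginCount" are assumed names).
* // Returning an update object applies it to that document; returning false
* // (or any falsy value) skips the document.
* users.filterUpdate({active: true}, function (doc) {
* 	if (doc.loginCount === undefined) { return false; }
* 	return {$inc: {loginCount: 1}};
* });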
* @returns {Array} */ Collection.prototype.filterUpdate = function (query, func, options) { var items = this.find(query, options), results = [], singleItem, singleQuery, singleUpdate, pk = this.primaryKey(), i; for (i = 0; i < items.length; i++) { singleItem = items[i]; singleUpdate = func(singleItem); if (singleUpdate) { singleQuery = {}; singleQuery[pk] = singleItem[pk]; results.push(this.update(singleQuery, singleUpdate)); } } return results; }; /** * Modifies an existing document or documents in a collection. * This will update all matches for 'query' with the data held * in 'update'. It will not overwrite the matched documents * with the update document. * * @param {Object} query The query that must be matched for a * document to be operated on. * @param {Object} update The object containing updated * key/values. Any keys that match keys on the existing document * will be overwritten with this data. Any keys that do not * currently exist on the document will be added to the document. * @param {Object=} options An options object. * @param {Function=} callback The callback method to call when * the update is complete. * @returns {Array} The items that were updated. */ Collection.prototype.update = function (query, update, options, callback) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } // Convert queries from mongo dot notation to forerunner queries if (this.mongoEmulation()) { this.convertToFdb(query); this.convertToFdb(update); } else { // Decouple the update data update = this.decouple(update); } // Detect $replace operations and set flag if (update.$replace) { // Make sure we have an options object options = options || {}; // Set the $replace flag in the options object options.$replace = true; // Move the replacement object out into the main update object update = update.$replace; } // Handle transform update = this.transformIn(update); return this._handleUpdate(query, update, options, callback); }; /** * Handles the update operation that was initiated by a call to update(). * @param {Object} query The query that must be matched for a * document to be operated on. * @param {Object} update The object containing updated * key/values. Any keys that match keys on the existing document * will be overwritten with this data. Any keys that do not * currently exist on the document will be added to the document. * @param {Object=} options An options object. * @param {Function=} callback The callback method to call when * the update is complete. * @returns {Array} The items that were updated. 
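* @example
* // Sketch only: both public update() calls below are routed through this
* // handler ("users" and the field names are assumptions).
* users.update({_id: '1'}, {lastSeen: Date.now()});
* // $replace swaps the document body while preserving the primary key:
* users.update({_id: '1'}, {$replace: {name: 'Jo', active: false}});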
* @private */ Collection.prototype._handleUpdate = function (query, update, options, callback) { var self = this, op = this._metrics.create('update'), dataSet, updated, updateCall = function (referencedDoc) { var oldDoc = self.decouple(referencedDoc), newDoc, triggerOperation, result; if (self.willTrigger(self.TYPE_UPDATE, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_UPDATE, self.PHASE_AFTER)) { newDoc = self.decouple(referencedDoc); triggerOperation = { type: 'update', query: self.decouple(query), update: self.decouple(update), options: self.decouple(options), op: op }; // Update newDoc with the update criteria so we know what the data will look // like AFTER the update is processed result = self.updateObject(newDoc, triggerOperation.update, triggerOperation.query, triggerOperation.options, ''); if (self.processTrigger(triggerOperation, self.TYPE_UPDATE, self.PHASE_BEFORE, referencedDoc, newDoc) !== false) { // No triggers complained so let's execute the replacement of the existing // object with the new one self._removeFromIndexes(referencedDoc); result = self.updateObject(referencedDoc, newDoc, triggerOperation.query, triggerOperation.options, ''); self._insertIntoIndexes(referencedDoc); // NOTE: If for some reason we would only like to fire this event if changes are actually going // to occur on the object from the proposed update then we can add "result &&" to the if self.processTrigger(triggerOperation, self.TYPE_UPDATE, self.PHASE_AFTER, oldDoc, newDoc); } else { // Trigger cancelled operation so tell result that it was not updated result = false; } } else { // No triggers complained so let's execute the replacement of the existing // object with the new one self._removeFromIndexes(referencedDoc); result = self.updateObject(referencedDoc, update, query, options, ''); self._insertIntoIndexes(referencedDoc); } return result; }; op.start(); op.time('Retrieve documents to update'); dataSet = this.find(query, {$decouple: false}); op.time('Retrieve documents to update'); if (dataSet.length) { op.time('Update documents'); updated = dataSet.filter(updateCall); op.time('Update documents'); if (updated.length) { if (this.debug()) { console.log(this.logIdentifier() + ' Updated some data'); } op.time('Resolve chains'); if (this.chainWillSend()) { this.chainSend('update', { query: query, update: update, dataSet: this.decouple(updated) }, options); } op.time('Resolve chains'); this._onUpdate(updated); this._onChange(); if (callback) { callback.call(this, updated || []); } this.emit('immediateChange', {type: 'update', data: updated}); this.deferEmit('change', {type: 'update', data: updated}); } else { if (callback) { callback.call(this, updated || []); } } } else { if (callback) { callback.call(this, updated || []); } } op.stop(); // TODO: Should we decouple the updated array before return by default? return updated || []; }; /** * Replaces an existing object with data from the new object without * breaking data references. It does this by removing existing keys * from the base object and then adding the passed object's keys to * the existing base object, thereby maintaining any references to * the existing base object but effectively replacing the object with * the new one. * @param {Object} currentObj The base object to alter. * @param {Object} newObj The new object to overwrite the existing one * with. * @returns {*} Chain. 
* @private */ Collection.prototype._replaceObj = function (currentObj, newObj) { var i; // Check if the new document has a different primary key value from the existing one // Remove item from indexes this._removeFromIndexes(currentObj); // Remove existing keys from current object for (i in currentObj) { if (currentObj.hasOwnProperty(i)) { delete currentObj[i]; } } // Add new keys to current object for (i in newObj) { if (newObj.hasOwnProperty(i)) { currentObj[i] = newObj[i]; } } // Update the item in the primary index if (!this._insertIntoIndexes(currentObj)) { throw(this.logIdentifier() + ' Primary key violation in update! Key violated: ' + currentObj[this._primaryKey]); } // Update the object in the collection data //this._data.splice(this._data.indexOf(currentObj), 1, newObj); return this; }; /** * Helper method to update a document via it's id. * @param {String} id The id of the document. * @param {Object} update The object containing the key/values to * update to. * @param {Object=} options An options object. * @param {Function=} callback The callback method to call when * the update is complete. * @returns {Object} The document that was updated or undefined * if no document was updated. */ Collection.prototype.updateById = function (id, update, options, callback) { var searchObj = {}, wrappedCallback; searchObj[this._primaryKey] = id; if (callback) { wrappedCallback = function (data) { callback(data[0]); }; } return this.update(searchObj, update, options, wrappedCallback)[0]; }; /** * Internal method for document updating. * @param {Object} doc The document to update. * @param {Object} update The object with key/value pairs to update * the document with. * @param {Object} query The query object that we need to match to * perform an update. * @param {Object} options An options object. * @param {String} path The current recursive path. * @param {String} opType The type of update operation to perform, * if none is specified default is to set new data against matching * fields. * @returns {Boolean} True if the document was updated with new / * changed data or false if it was not updated because the data was * the same. 
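* @example
* // Sketch of operator objects this method understands, expressed as public
* // update() calls ("users" and the field names are assumptions).
* users.update({_id: '1'}, {$push: {tags: 'new'}});       // append to an array
* users.update({_id: '1'}, {$pull: {tags: 'old'}});       // remove matching entries
* users.update({_id: '1'}, {$inc: {score: 5}});           // increment a number
* users.update({_id: '1'}, {$unset: {tempFlag: 1}});      // remove a key
* users.update({_id: '1'}, {$toggle: {active: 1}});       // flip a boolean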
* @private */ Collection.prototype.updateObject = function (doc, update, query, options, path, opType) { // TODO: This method is long, try to break it into smaller pieces update = this.decouple(update); // Clear leading dots from path path = path || ''; if (path.substr(0, 1) === '.') { path = path.substr(1, path.length -1); } //var oldDoc = this.decouple(doc), var updated = false, recurseUpdated = false, operation, tmpArray, tmpIndex, tmpCount, tempIndex, tempKey, replaceObj, pk, pathInstance, sourceIsArray, updateIsArray, i; // Check if we have a $replace flag in the options object if (options && options.$replace === true) { operation = true; replaceObj = update; pk = this.primaryKey(); // Loop the existing item properties and compare with // the replacement (never remove primary key) for (tempKey in doc) { if (doc.hasOwnProperty(tempKey) && tempKey !== pk) { if (replaceObj[tempKey] === undefined) { // The new document doesn't have this field, remove it from the doc this._updateUnset(doc, tempKey); updated = true; } } } // Loop the new item props and update the doc for (tempKey in replaceObj) { if (replaceObj.hasOwnProperty(tempKey) && tempKey !== pk) { this._updateOverwrite(doc, tempKey, replaceObj[tempKey]); updated = true; } } // Early exit return updated; } // DEVS PLEASE NOTE -- Early exit could have occurred above and code below will never be reached - Rob Evans - CEO - 05/08/2016 // Loop each key in the update object for (i in update) { if (update.hasOwnProperty(i)) { // Reset operation flag operation = false; // Check if the property starts with a dollar (function) if (!operation && i.substr(0, 1) === '$') { // Check for commands switch (i) { case '$key': case '$index': case '$data': case '$min': case '$max': // Ignore some operators operation = true; break; case '$each': operation = true; // Loop over the array of updates and run each one tmpCount = update.$each.length; for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) { recurseUpdated = this.updateObject(doc, update.$each[tmpIndex], query, options, path); if (recurseUpdated) { updated = true; } } updated = updated || recurseUpdated; break; case '$replace': operation = true; replaceObj = update.$replace; pk = this.primaryKey(); // Loop the existing item properties and compare with // the replacement (never remove primary key) for (tempKey in doc) { if (doc.hasOwnProperty(tempKey) && tempKey !== pk) { if (replaceObj[tempKey] === undefined) { // The new document doesn't have this field, remove it from the doc this._updateUnset(doc, tempKey); updated = true; } } } // Loop the new item props and update the doc for (tempKey in replaceObj) { if (replaceObj.hasOwnProperty(tempKey) && tempKey !== pk) { this._updateOverwrite(doc, tempKey, replaceObj[tempKey]); updated = true; } } break; case '$overwrite': case '$inc': case '$push': case '$splicePush': case '$splicePull': case '$addToSet': case '$pull': case '$pop': case '$move': case '$cast': case '$unset': case '$pullAll': case '$mul': case '$rename': case '$toggle': operation = true; // Now run the operation recurseUpdated = this.updateObject(doc, update[i], query, options, path, i); updated = updated || recurseUpdated; break; default: // This is an unknown operator or just // a normal field with a dollar starting // character so ignore it operation = false; break; } } // Check if the key has a .$ at the end, denoting an array lookup if (!operation && this._isPositionalKey(i)) { operation = true; // Modify i to be the name of the field i = i.substr(0, i.length - 2); pathInstance = new 
Path(path + '.' + i); // Check if the key is an array and has items if (doc[i] && doc[i] instanceof Array && doc[i].length) { tmpArray = []; // Loop the array and find matches to our search for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) { if (this._match(doc[i][tmpIndex], pathInstance.value(query)[0], options, '', {})) { tmpArray.push(tmpIndex); } } // Loop the items that matched and update them for (tmpIndex = 0; tmpIndex < tmpArray.length; tmpIndex++) { recurseUpdated = this.updateObject(doc[i][tmpArray[tmpIndex]], update[i + '.$'], query, options, path + '.' + i, opType); updated = updated || recurseUpdated; } } } if (!operation) { if (!opType && typeof(update[i]) === 'object') { if (doc[i] !== null && typeof(doc[i]) === 'object') { // Check if we are dealing with arrays sourceIsArray = doc[i] instanceof Array; updateIsArray = update[i] instanceof Array; if (sourceIsArray || updateIsArray) { // Check if the update is an object and the doc is an array if (!updateIsArray && sourceIsArray) { // Update is an object, source is an array so match the array items // with our query object to find the one to update inside this array // Loop the array and find matches to our search for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) { recurseUpdated = this.updateObject(doc[i][tmpIndex], update[i], query, options, path + '.' + i, opType); updated = updated || recurseUpdated; } } else { // Either both source and update are arrays or the update is // an array and the source is not, so set source to update if (doc[i] !== update[i]) { this._updateProperty(doc, i, update[i]); updated = true; } } } else { // Check if the doc key is a date instance if (doc[i] instanceof Date) { // The doc key is a date object, assign the new date this._updateProperty(doc, i, update[i]); updated = true; } else { // The doc key is an object so traverse the // update further recurseUpdated = this.updateObject(doc[i], update[i], query, options, path + '.' + i, opType); updated = updated || recurseUpdated; } } } else { if (doc[i] !== update[i]) { this._updateProperty(doc, i, update[i]); updated = true; } } } else { switch (opType) { case '$inc': var doUpdate = true; // Check for a $min / $max operator if (update[i] > 0) { if (update.$max) { // Check current value if (doc[i] >= update.$max) { // Don't update doUpdate = false; } } } else if (update[i] < 0) { if (update.$min) { // Check current value if (doc[i] <= update.$min) { // Don't update doUpdate = false; } } } if (doUpdate) { this._updateIncrement(doc, i, update[i]); updated = true; } break; case '$cast': // Casts a property to the type specified if it is not already // that type. 
If the cast is an array or an object and the property // is not already that type a new array or object is created and // set to the property, overwriting the previous value switch (update[i]) { case 'array': if (!(doc[i] instanceof Array)) { // Cast to an array this._updateProperty(doc, i, update.$data || []); updated = true; } break; case 'object': if (!(doc[i] instanceof Object) || (doc[i] instanceof Array)) { // Cast to an object this._updateProperty(doc, i, update.$data || {}); updated = true; } break; case 'number': if (typeof doc[i] !== 'number') { // Cast to a number this._updateProperty(doc, i, Number(doc[i])); updated = true; } break; case 'string': if (typeof doc[i] !== 'string') { // Cast to a string this._updateProperty(doc, i, String(doc[i])); updated = true; } break; default: throw(this.logIdentifier() + ' Cannot update cast to unknown type: ' + update[i]); } break; case '$push': // Check if the target key is undefined and if so, create an array if (doc[i] === undefined) { // Initialise a new array this._updateProperty(doc, i, []); } // Check that the target key is an array if (doc[i] instanceof Array) { // Check for a $position modifier with an $each if (update[i].$position !== undefined && update[i].$each instanceof Array) { // Grab the position to insert at tempIndex = update[i].$position; // Loop the each array and push each item tmpCount = update[i].$each.length; for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) { this._updateSplicePush(doc[i], tempIndex + tmpIndex, update[i].$each[tmpIndex]); } } else if (update[i].$each instanceof Array) { // Do a loop over the each to push multiple items tmpCount = update[i].$each.length; for (tmpIndex = 0; tmpIndex < tmpCount; tmpIndex++) { this._updatePush(doc[i], update[i].$each[tmpIndex]); } } else { // Do a standard push this._updatePush(doc[i], update[i]); } updated = true; } else { throw(this.logIdentifier() + ' Cannot push to a key that is not an array! (' + i + ')'); } break; case '$pull': if (doc[i] instanceof Array) { tmpArray = []; // Loop the array and find matches to our search for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) { if (this._match(doc[i][tmpIndex], update[i], options, '', {})) { tmpArray.push(tmpIndex); } } tmpCount = tmpArray.length; // Now loop the pull array and remove items to be pulled while (tmpCount--) { this._updatePull(doc[i], tmpArray[tmpCount]); updated = true; } } break; case '$pullAll': if (doc[i] instanceof Array) { if (update[i] instanceof Array) { tmpArray = doc[i]; tmpCount = tmpArray.length; if (tmpCount > 0) { // Now loop the pull array and remove items to be pulled while (tmpCount--) { for (tempIndex = 0; tempIndex < update[i].length; tempIndex++) { if (tmpArray[tmpCount] === update[i][tempIndex]) { this._updatePull(doc[i], tmpCount); tmpCount--; updated = true; } } if (tmpCount < 0) { break; } } } } else { throw(this.logIdentifier() + ' Cannot pullAll without being given an array of values to pull! 
(' + i + ')'); } } break; case '$addToSet': // Check if the target key is undefined and if so, create an array if (doc[i] === undefined) { // Initialise a new array this._updateProperty(doc, i, []); } // Check that the target key is an array if (doc[i] instanceof Array) { // Loop the target array and check for existence of item var targetArr = doc[i], targetArrIndex, targetArrCount = targetArr.length, objHash, addObj = true, optionObj = (options && options.$addToSet), hashMode, pathSolver; // Check if we have an options object for our operation if (update[i].$key) { hashMode = false; pathSolver = new Path(update[i].$key); objHash = pathSolver.value(update[i])[0]; // Remove the key from the object before we add it delete update[i].$key; } else if (optionObj && optionObj.key) { hashMode = false; pathSolver = new Path(optionObj.key); objHash = pathSolver.value(update[i])[0]; } else { objHash = this.jStringify(update[i]); hashMode = true; } for (targetArrIndex = 0; targetArrIndex < targetArrCount; targetArrIndex++) { if (hashMode) { // Check if objects match via a string hash (JSON) if (this.jStringify(targetArr[targetArrIndex]) === objHash) { // The object already exists, don't add it addObj = false; break; } } else { // Check if objects match based on the path if (objHash === pathSolver.value(targetArr[targetArrIndex])[0]) { // The object already exists, don't add it addObj = false; break; } } } if (addObj) { this._updatePush(doc[i], update[i]); updated = true; } } else { throw(this.logIdentifier() + ' Cannot addToSet on a key that is not an array! (' + i + ')'); } break; case '$splicePush': // Check if the target key is undefined and if so, create an array if (doc[i] === undefined) { // Initialise a new array this._updateProperty(doc, i, []); } // Check that the target key is an array if (doc[i] instanceof Array) { tempIndex = update.$index; if (tempIndex !== undefined) { delete update.$index; // Check for out of bounds index if (tempIndex > doc[i].length) { tempIndex = doc[i].length; } this._updateSplicePush(doc[i], tempIndex, update[i]); updated = true; } else { throw(this.logIdentifier() + ' Cannot splicePush without a $index integer value!'); } } else { throw(this.logIdentifier() + ' Cannot splicePush with a key that is not an array! (' + i + ')'); } break; case '$splicePull': // Check that the target key is not undefined if (doc[i] !== undefined) { // Check that the target key is an array if (doc[i] instanceof Array) { tempIndex = update[i].$index; if (tempIndex !== undefined) { // Check for in bounds index if (tempIndex < doc[i].length) { this._updateSplicePull(doc[i], tempIndex); updated = true; } } else { throw(this.logIdentifier() + ' Cannot splicePull without a $index integer value!'); } } else { throw(this.logIdentifier() + ' Cannot splicePull from a key that is not an array! (' + i + ')'); } } break; case '$move': if (doc[i] instanceof Array) { // Loop the array and find matches to our search for (tmpIndex = 0; tmpIndex < doc[i].length; tmpIndex++) { if (this._match(doc[i][tmpIndex], update[i], options, '', {})) { var moveToIndex = update.$index; if (moveToIndex !== undefined) { delete update.$index; this._updateSpliceMove(doc[i], tmpIndex, moveToIndex); updated = true; } else { throw(this.logIdentifier() + ' Cannot move without a $index integer value!'); } break; } } } else { throw(this.logIdentifier() + ' Cannot move on a key that is not an array! 
(' + i + ')'); } break; case '$mul': this._updateMultiply(doc, i, update[i]); updated = true; break; case '$rename': this._updateRename(doc, i, update[i]); updated = true; break; case '$overwrite': this._updateOverwrite(doc, i, update[i]); updated = true; break; case '$unset': this._updateUnset(doc, i); updated = true; break; case '$clear': this._updateClear(doc, i); updated = true; break; case '$pop': if (doc[i] instanceof Array) { if (this._updatePop(doc[i], update[i])) { updated = true; } } else { throw(this.logIdentifier() + ' Cannot pop from a key that is not an array! (' + i + ')'); } break; case '$toggle': // Toggle the boolean property between true and false this._updateProperty(doc, i, !doc[i]); updated = true; break; default: if (doc[i] !== update[i]) { this._updateProperty(doc, i, update[i]); updated = true; } break; } } } } } return updated; }; /** * Determines if the passed key has an array positional mark * (a dollar at the end of its name). * @param {String} key The key to check. * @returns {Boolean} True if it is a positional or false if not. * @private */ Collection.prototype._isPositionalKey = function (key) { return key.substr(key.length - 2, 2) === '.$'; }; /** * Removes any documents from the collection that match the search * query key/values. * @param {Object=} query The query identifying the documents to remove. If no * query object is passed, all documents will be removed from the collection. * @param {Object=} options An options object. * @param {Function=} callback A callback method. * @returns {Array} An array of the documents that were removed. */ Collection.prototype.remove = function (query, options, callback) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } var self = this, dataSet, index, arrIndex, returnArr, removeMethod, triggerOperation, doc, newDoc; if (typeof(options) === 'function') { callback = options; options = {}; } // Convert queries from mongo dot notation to forerunner queries if (this.mongoEmulation()) { this.convertToFdb(query); } if (query instanceof Array) { returnArr = []; for (arrIndex = 0; arrIndex < query.length; arrIndex++) { returnArr.push(this.remove(query[arrIndex], {noEmit: true})); } if (!options || (options && !options.noEmit)) { this._onRemove(returnArr); } if (callback) { callback.call(this, false, returnArr); } return returnArr; } else { returnArr = []; dataSet = this.find(query, {$decouple: false}); if (dataSet.length) { removeMethod = function (dataItem) { // Remove the item from the collection's indexes self._removeFromIndexes(dataItem); // Remove data from internal stores index = self._data.indexOf(dataItem); self._dataRemoveAtIndex(index); returnArr.push(dataItem); }; // Remove the data from the collection for (var i = 0; i < dataSet.length; i++) { doc = dataSet[i]; if (self.willTrigger(self.TYPE_REMOVE, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_REMOVE, self.PHASE_AFTER)) { triggerOperation = { type: 'remove' }; newDoc = self.decouple(doc); if (self.processTrigger(triggerOperation, self.TYPE_REMOVE, self.PHASE_BEFORE, newDoc, newDoc) !== false) { // The trigger didn't ask to cancel so execute the removal method removeMethod(doc); self.processTrigger(triggerOperation, self.TYPE_REMOVE, self.PHASE_AFTER, newDoc, newDoc); } } else { // No triggers to execute removeMethod(doc); } } if (returnArr.length) { //op.time('Resolve chains'); self.chainSend('remove', { query: query, dataSet: returnArr }, options); //op.time('Resolve chains'); if (!options || (options && 
!options.noEmit)) { this._onRemove(returnArr); } this._onChange(); this.emit('immediateChange', {type: 'remove', data: returnArr}); this.deferEmit('change', {type: 'remove', data: returnArr}); } } if (callback) { callback.call(this, false, returnArr); } return returnArr; } }; /** * Helper method that removes a document that matches the given id. * @param {String} id The id of the document to remove. * @returns {Object} The document that was removed or undefined if * nothing was removed. */ Collection.prototype.removeById = function (id) { var searchObj = {}; searchObj[this._primaryKey] = id; return this.remove(searchObj)[0]; }; /** * Processes a deferred action queue. * @param {String} type The queue name to process. * @param {Function} callback A method to call when the queue has processed. * @param {Object=} resultObj A temp object to hold results in. */ Collection.prototype.processQueue = function (type, callback, resultObj) { var self = this, queue = this._deferQueue[type], deferThreshold = this._deferThreshold[type], deferTime = this._deferTime[type], dataArr, result; resultObj = resultObj || { deferred: true }; if (queue.length) { // Process items up to the threshold if (queue.length > deferThreshold) { // Grab items up to the threshold value dataArr = queue.splice(0, deferThreshold); } else { // Grab all the remaining items dataArr = queue.splice(0, queue.length); } result = self[type](dataArr); switch (type) { case 'insert': resultObj.inserted = resultObj.inserted || []; resultObj.failed = resultObj.failed || []; resultObj.inserted = resultObj.inserted.concat(result.inserted); resultObj.failed = resultObj.failed.concat(result.failed); break; } // Queue another process setTimeout(function () { self.processQueue.call(self, type, callback, resultObj); }, deferTime); } else { if (callback) { callback.call(this, resultObj); } this._asyncComplete(type); } // Check if all queues are complete if (!this.isProcessingQueue()) { this.deferEmit('queuesComplete'); } }; /** * Checks if any CRUD operations have been deferred and are still waiting to * be processed. * @returns {Boolean} True if there are still deferred CRUD operations to process * or false if all queues are clear. */ Collection.prototype.isProcessingQueue = function () { var i; for (i in this._deferQueue) { if (this._deferQueue.hasOwnProperty(i)) { if (this._deferQueue[i].length) { return true; } } } return false; }; /** * Inserts a document or array of documents into the collection. * @param {Object|Array} data Either a document object or array of document * @param {Number=} index Optional index to insert the record at. * @param {Collection.insertCallback=} callback Optional callback called * once the insert is complete. */ Collection.prototype.insert = function (data, index, callback) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } if (typeof(index) === 'function') { callback = index; index = this._data.length; } else if (index === undefined) { index = this._data.length; } data = this.transformIn(data); return this._insertHandle(data, index, callback); }; /** * The insert operation's callback. * @name Collection.insertCallback * @callback Collection.insertCallback * @param {Object} result The result object will contain two arrays (inserted * and failed) which represent the documents that did get inserted and those * that didn't for some reason (usually index violation). Failed items also * contain a reason. Inspect the failed array for further information. 
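*
* For example, a minimal usage sketch (the collection and documents shown
* are illustrative only):
*   coll.insert([{name: 'Jim'}, {name: 'Bob'}], function (result) {
*       console.log(result.inserted.length, result.failed.length);
*   });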
* * A third field called "deferred" is a boolean value to indicate if the * insert operation was deferred across more than one CPU cycle (to avoid * blocking the main thread). */ /** * Inserts a document or array of documents into the collection. * @param {Object|Array} data Either a document object or array of document * @param {Number=} index Optional index to insert the record at. * @param {Collection.insertCallback=} callback Optional callback called * once the insert is complete. */ Collection.prototype._insertHandle = function (data, index, callback) { var //self = this, queue = this._deferQueue.insert, deferThreshold = this._deferThreshold.insert, //deferTime = this._deferTime.insert, inserted = [], failed = [], insertResult, resultObj, i; if (data instanceof Array) { // Check if there are more insert items than the insert defer // threshold, if so, break up inserts so we don't tie up the // ui or thread if (this._deferredCalls && data.length > deferThreshold) { // Break up insert into blocks this._deferQueue.insert = queue.concat(data); this._asyncPending('insert'); // Fire off the insert queue handler this.processQueue('insert', callback); return; } else { // Loop the array and add items for (i = 0; i < data.length; i++) { insertResult = this._insert(data[i], index + i); if (insertResult === true) { inserted.push(data[i]); } else { failed.push({ doc: data[i], reason: insertResult }); } } } } else { // Store the data item insertResult = this._insert(data, index); if (insertResult === true) { inserted.push(data); } else { failed.push({ doc: data, reason: insertResult }); } } resultObj = { deferred: false, inserted: inserted, failed: failed }; this._onInsert(inserted, failed); if (callback) { callback.call(this, resultObj); } this._onChange(); this.emit('immediateChange', {type: 'insert', data: inserted, failed: failed}); this.deferEmit('change', {type: 'insert', data: inserted, failed: failed}); return resultObj; }; /** * Internal method to insert a document into the collection. Will * check for index violations before allowing the document to be inserted. * @param {Object} doc The document to insert after passing index violation * tests. * @param {Number=} index Optional index to insert the document at. * @returns {Boolean|Object} True on success, false if no document passed, * or an object containing details about an index violation if one occurred. 
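* @example
* // Illustrative only: failures from this method surface through the public
* // insert() API as the "reason" recorded against a failed document, e.g. a
* // primary key clash:
* coll.insert({_id: 'a'});
* var res = coll.insert({_id: 'a'}); // res.failed[0].reason describes the index violation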
* @private */ Collection.prototype._insert = function (doc, index) { if (doc) { var self = this, indexViolation, triggerOperation, insertMethod, newDoc, capped = this.capped(), cappedSize = this.cappedSize(); this.ensurePrimaryKey(doc); // Check indexes are not going to be broken by the document indexViolation = this.insertIndexViolation(doc); insertMethod = function (doc) { // Add the item to the collection's indexes self._insertIntoIndexes(doc); // Check index overflow if (index > self._data.length) { index = self._data.length; } // Insert the document self._dataInsertAtIndex(index, doc); // Check capped collection status and remove first record // if we are over the threshold if (capped && self._data.length > cappedSize) { // Remove the first item in the data array self.removeById(self._data[0][self._primaryKey]); } //op.time('Resolve chains'); if (self.chainWillSend()) { self.chainSend('insert', { dataSet: self.decouple([doc]) }, { index: index }); } //op.time('Resolve chains'); }; if (!indexViolation) { if (self.willTrigger(self.TYPE_INSERT, self.PHASE_BEFORE) || self.willTrigger(self.TYPE_INSERT, self.PHASE_AFTER)) { triggerOperation = { type: 'insert' }; if (self.processTrigger(triggerOperation, self.TYPE_INSERT, self.PHASE_BEFORE, {}, doc) !== false) { insertMethod(doc); if (self.willTrigger(self.TYPE_INSERT, self.PHASE_AFTER)) { // Clone the doc so that the programmer cannot update the internal document // on the "after" phase trigger newDoc = self.decouple(doc); self.processTrigger(triggerOperation, self.TYPE_INSERT, self.PHASE_AFTER, {}, newDoc); } } else { // The trigger just wants to cancel the operation return 'Trigger cancelled operation'; } } else { // No triggers to execute insertMethod(doc); } return true; } else { return 'Index violation in index: ' + indexViolation; } } return 'No document passed to insert'; }; /** * Inserts a document into the internal collection data array at * the specified index. * @param {Number} index The index to insert at. * @param {Object} doc The document to insert. * @private */ Collection.prototype._dataInsertAtIndex = function (index, doc) { this._data.splice(index, 0, doc); }; /** * Removes a document from the internal collection data array at * the specified index. * @param {Number} index The index to remove from. * @private */ Collection.prototype._dataRemoveAtIndex = function (index) { this._data.splice(index, 1); }; /** * Replaces all data in the collection's internal data array with * the passed array of data. * @param {Array} data The array of data to replace existing data with. * @private */ Collection.prototype._dataReplace = function (data) { // Clear the array - using a while loop with pop is by far the // fastest way to clear an array currently while (this._data.length) { this._data.pop(); } // Append new items to the array this._data = this._data.concat(data); }; /** * Inserts a document into the collection indexes. * @param {Object} doc The document to insert.
* @private */ Collection.prototype._insertIntoIndexes = function (doc) { var arr = this._indexByName, arrIndex, violated, hash = this.hash(doc), pk = this._primaryKey; // Insert to primary key index violated = this._primaryIndex.uniqueSet(doc[pk], doc); this._primaryCrc.uniqueSet(doc[pk], hash); this._crcLookup.uniqueSet(hash, doc); // Insert into other indexes for (arrIndex in arr) { if (arr.hasOwnProperty(arrIndex)) { arr[arrIndex].insert(doc); } } return violated; }; /** * Removes a document from the collection indexes. * @param {Object} doc The document to remove. * @private */ Collection.prototype._removeFromIndexes = function (doc) { var arr = this._indexByName, arrIndex, hash = this.hash(doc), pk = this._primaryKey; // Remove from primary key index this._primaryIndex.unSet(doc[pk]); this._primaryCrc.unSet(doc[pk]); this._crcLookup.unSet(hash); // Remove from other indexes for (arrIndex in arr) { if (arr.hasOwnProperty(arrIndex)) { arr[arrIndex].remove(doc); } } }; /** * Updates collection index data for the passed document. * @param {Object} oldDoc The old document as it was before the update (must be * actual reference to original document). * @param {Object} newDoc The document as it now is after the update. * @private */ Collection.prototype._updateIndexes = function (oldDoc, newDoc) { this._removeFromIndexes(oldDoc); this._insertIntoIndexes(newDoc); }; /** * Rebuild collection indexes. * @private */ Collection.prototype._rebuildIndexes = function () { var arr = this._indexByName, arrIndex; // Remove from other indexes for (arrIndex in arr) { if (arr.hasOwnProperty(arrIndex)) { arr[arrIndex].rebuild(); } } }; /** * Uses the passed query to generate a new collection with results * matching the query parameters. * * @param {Object} query The query object to generate the subset with. * @param {Object=} options An options object. * @returns {*} */ Collection.prototype.subset = function (query, options) { var result = this.find(query, options), coll; coll = new Collection(); coll.db(this._db); coll.subsetOf(this) .primaryKey(this._primaryKey) .setData(result); return coll; }; /** * Gets / sets the collection that this collection is a subset of. * @param {Collection=} collection The collection to set as the parent of this subset. * @returns {Collection} */ Shared.synthesize(Collection.prototype, 'subsetOf'); /** * Checks if the collection is a subset of the passed collection. * @param {Collection} collection The collection to test against. * @returns {Boolean} True if the passed collection is the parent of * the current collection. */ Collection.prototype.isSubsetOf = function (collection) { return this._subsetOf === collection; }; /** * Find the distinct values for a specified field across a single collection and * returns the values as a results array. * @param {String} path The field path to return distinct values for e.g. "person.name". * @param {Object=} query The query to use to filter the documents used to return values from. * @param {Object=} options The query options to use when running the query. 
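* @example
* // A hedged sketch; the "person.name" path and the query are illustrative:
* var names = coll.distinct('person.name', {age: {$gte: 18}});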
* @returns {Array} */ Collection.prototype.distinct = function (path, query, options) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } var data = this.find(query, options), pathSolver = new Path(), valueUsed = {}, distinctValues = [], aggregatedValues, value, i; // Get path values as an array aggregatedValues = pathSolver.aggregate(data, path); // Loop the data and build array of distinct values for (i = 0; i < aggregatedValues.length; i++) { value = aggregatedValues[i]; if (value && !valueUsed[value]) { valueUsed[value] = true; distinctValues.push(value); } } return distinctValues; }; /** * Helper method to find a document by it's id. * @param {String} id The id of the document. * @param {Object=} options The options object, allowed keys are sort and limit. * @returns {Array} The items that were updated. */ Collection.prototype.findById = function (id, options) { var searchObj = {}; searchObj[this._primaryKey] = id; return this.find(searchObj, options)[0]; }; /** * Finds all documents that contain the passed string or search object * regardless of where the string might occur within the document. This * will match strings from the start, middle or end of the document's * string (partial match). * @param {String} search The string to search for. Case sensitive. * @param {Object=} options A standard find() options object. * @returns {Array} An array of documents that matched the search string. */ Collection.prototype.peek = function (search, options) { // Loop all items var arr = this._data, arrCount = arr.length, arrIndex, arrItem, tempColl = new Collection(), typeOfSearch = typeof search; if (typeOfSearch === 'string') { for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { // Get json representation of object arrItem = this.jStringify(arr[arrIndex]); // Check if string exists in object json if (arrItem.indexOf(search) > -1) { // Add this item to the temp collection tempColl.insert(arr[arrIndex]); } } return tempColl.find({}, options); } else { return this.find(search, options); } }; /** * Provides a query plan / operations log for a query. * @param {Object} query The query to execute. * @param {Object=} options Optional options object. * @returns {Object} The query plan. */ Collection.prototype.explain = function (query, options) { var result = this.find(query, options); return result.__fdbOp._data; }; /** * Generates an options object with default values or adds default * values to a passed object if those values are not currently set * to anything. * @param {Object=} obj Optional options object to modify. * @returns {Object} The options object. */ Collection.prototype.options = function (obj) { obj = obj || {}; obj.$decouple = obj.$decouple !== undefined ? obj.$decouple : true; obj.$explain = obj.$explain !== undefined ? obj.$explain : false; return obj; }; /** * Queries the collection based on the query object passed. * @param {Object} query The query key/values that a document must match in * order for it to be returned in the result array. * @param {Object=} options An optional options object. * @param {Function=} callback !! DO NOT USE, THIS IS NON-OPERATIONAL !! * Optional callback. If specified the find process * will not return a value and will assume that you wish to operate under an * async mode. This will break up large find requests into smaller chunks and * process them in a non-blocking fashion allowing large datasets to be queried * without causing the browser UI to pause. 
Results from this type of operation * will be passed back to the callback once completed. * * @returns {Array} The results array from the find operation, containing all * documents that matched the query. */ Collection.prototype.find = function (query, options, callback) { // Convert queries from mongo dot notation to forerunner queries if (this.mongoEmulation()) { this.convertToFdb(query); } if (callback) { // Check the size of the collection's data array // Split operation into smaller tasks and callback when complete callback.call(this, 'Callbacks for the find() operation are not yet implemented!', []); return []; } return this._find.call(this, query, options, callback); }; Collection.prototype._find = function (query, options) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } // TODO: This method is quite long, break into smaller pieces query = query || {}; var op = this._metrics.create('find'), pk = this.primaryKey(), self = this, analysis, scanLength, requiresTableScan = true, resultArr, joinIndex, joinSource = {}, joinQuery, joinPath, joinSourceKey, joinSourceType, joinSourceIdentifier, joinSourceData, resultRemove = [], i, j, k, fieldListOn = [], fieldListOff = [], elemMatchPathSolver, elemMatchSubArr, elemMatchSpliceArr, matcherTmpOptions = {}, result, cursor = {}, pathSolver, waterfallCollection, matcher; if (!(options instanceof Array)) { options = this.options(options); } matcher = function (doc) { return self._match(doc, query, options, 'and', matcherTmpOptions); }; op.start(); if (query) { // Check if the query is an array (multi-operation waterfall query) if (query instanceof Array) { waterfallCollection = this; // Loop the query operations for (i = 0; i < query.length; i++) { // Execute each operation and pass the result into the next // query operation waterfallCollection = waterfallCollection.subset(query[i], options && options[i] ? 
options[i] : {}); } return waterfallCollection.find(); } // Pre-process any data-changing query operators first if (query.$findSub) { // Check we have all the parts we need if (!query.$findSub.$path) { throw('$findSub missing $path property!'); } return this.findSub( query.$findSub.$query, query.$findSub.$path, query.$findSub.$subQuery, query.$findSub.$subOptions ); } if (query.$findSubOne) { // Check we have all the parts we need if (!query.$findSubOne.$path) { throw('$findSubOne missing $path property!'); } return this.findSubOne( query.$findSubOne.$query, query.$findSubOne.$path, query.$findSubOne.$subQuery, query.$findSubOne.$subOptions ); } // Get query analysis to execute best optimised code path op.time('analyseQuery'); analysis = this._analyseQuery(self.decouple(query), options, op); op.time('analyseQuery'); op.data('analysis', analysis); // Check if the query tries to limit by data that would only exist after // the join operation has been completed if (analysis.hasJoin && analysis.queriesJoin) { // The query has a join and tries to limit by it's joined data // Get references to the join sources op.time('joinReferences'); for (joinIndex = 0; joinIndex < analysis.joinsOn.length; joinIndex++) { joinSourceData = analysis.joinsOn[joinIndex]; joinSourceKey = joinSourceData.key; joinSourceType = joinSourceData.type; joinSourceIdentifier = joinSourceData.id; joinPath = new Path(analysis.joinQueries[joinSourceKey]); joinQuery = joinPath.value(query)[0]; joinSource[joinSourceIdentifier] = this._db[joinSourceType](joinSourceKey).subset(joinQuery); // Remove join clause from main query delete query[analysis.joinQueries[joinSourceKey]]; } op.time('joinReferences'); } // Check if an index lookup can be used to return this result if (analysis.indexMatch.length && (!options || (options && !options.$skipIndex))) { op.data('index.potential', analysis.indexMatch); op.data('index.used', analysis.indexMatch[0].index); // Get the data from the index op.time('indexLookup'); resultArr = [].concat(analysis.indexMatch[0].lookup) || []; op.time('indexLookup'); // Check if the index coverage is all keys, if not we still need to table scan it if (analysis.indexMatch[0].keyData.totalKeyCount === analysis.indexMatch[0].keyData.score) { // Don't require a table scan to find relevant documents requiresTableScan = false; } } else { op.flag('usedIndex', false); } if (requiresTableScan) { if (resultArr && resultArr.length) { scanLength = resultArr.length; op.time('tableScan: ' + scanLength); // Filter the source data and return the result resultArr = resultArr.filter(matcher); } else { // Filter the source data and return the result scanLength = this._data.length; op.time('tableScan: ' + scanLength); resultArr = this._data.filter(matcher); } op.time('tableScan: ' + scanLength); } // Order the array if we were passed a sort clause if (options.$orderBy) { op.time('sort'); resultArr = this.sort(options.$orderBy, resultArr); op.time('sort'); } if (options.$page !== undefined && options.$limit !== undefined) { // Record paging data cursor.page = options.$page; cursor.pages = Math.ceil(resultArr.length / options.$limit); cursor.records = resultArr.length; // Check if we actually need to apply the paging logic if (options.$page && options.$limit > 0) { op.data('cursor', cursor); // Skip to the page specified based on limit resultArr.splice(0, options.$page * options.$limit); } } if (options.$skip) { cursor.skip = options.$skip; // Skip past the number of records specified resultArr.splice(0, options.$skip); 
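// (Hedged usage sketch of the option handling around this point; the field
// names are illustrative only:
//   coll.find({}, {$orderBy: {name: 1}, $page: 2, $limit: 10});
// $orderBy sorts the results, $page/$limit slice out one page, $skip (handled
// just above) drops leading records and the $limit cap is applied just below.)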
op.data('skip', options.$skip); } if (options.$limit && resultArr && resultArr.length > options.$limit) { cursor.limit = options.$limit; resultArr.length = options.$limit; op.data('limit', options.$limit); } if (options.$decouple) { // Now decouple the data from the original objects op.time('decouple'); resultArr = this.decouple(resultArr); op.time('decouple'); op.data('flag.decouple', true); } // Now process any joins on the final data if (options.$join) { resultRemove = resultRemove.concat(this.applyJoin(resultArr, options.$join, joinSource)); op.data('flag.join', true); } // Process removal queue if (resultRemove.length) { op.time('removalQueue'); this.spliceArrayByIndexList(resultArr, resultRemove); op.time('removalQueue'); } if (options.$transform) { op.time('transform'); for (i = 0; i < resultArr.length; i++) { resultArr.splice(i, 1, options.$transform(resultArr[i])); } op.time('transform'); op.data('flag.transform', true); } // Process transforms if (this._transformEnabled && this._transformOut) { op.time('transformOut'); resultArr = this.transformOut(resultArr); op.time('transformOut'); } op.data('results', resultArr.length); } else { resultArr = []; } // Check for an $as operator in the options object and if it exists // iterate over the fields and generate a rename function that will // operate over the entire returned data array and rename each object's // fields to their new names // TODO: Enable $as in collection find to allow renaming fields /*if (options.$as) { renameFieldPath = new Path(); renameFieldMethod = function (obj, oldFieldPath, newFieldName) { renameFieldPath.path(oldFieldPath); renameFieldPath.rename(newFieldName); }; for (i in options.$as) { if (options.$as.hasOwnProperty(i)) { } } }*/ if (!options.$aggregate) { // Generate a list of fields to limit data by // Each property starts off being enabled by default (= 1) then // if any property is explicitly specified as 1 then all switch to // zero except _id. // // Any that are explicitly set to zero are switched off. 
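// (Sketch of the field limiting described above; the keys are illustrative:
//   coll.find({}, {name: 1, age: 1})  -> only _id, name and age are returned
//   coll.find({}, {secret: 0})        -> everything except the secret field.)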
op.time('scanFields'); for (i in options) { if (options.hasOwnProperty(i) && i.indexOf('$') !== 0) { if (options[i] === 1) { fieldListOn.push(i); } else if (options[i] === 0) { fieldListOff.push(i); } } } op.time('scanFields'); // Limit returned fields by the options data if (fieldListOn.length || fieldListOff.length) { op.data('flag.limitFields', true); op.data('limitFields.on', fieldListOn); op.data('limitFields.off', fieldListOff); op.time('limitFields'); // We have explicit fields switched on or off for (i = 0; i < resultArr.length; i++) { result = resultArr[i]; for (j in result) { if (result.hasOwnProperty(j)) { if (fieldListOn.length) { // We have explicit fields switched on so remove all fields // that are not explicitly switched on // Check if the field name is not the primary key if (j !== pk) { if (fieldListOn.indexOf(j) === -1) { // This field is not in the on list, remove it delete result[j]; } } } if (fieldListOff.length) { // We have explicit fields switched off so remove fields // that are explicitly switched off if (fieldListOff.indexOf(j) > -1) { // This field is in the off list, remove it delete result[j]; } } } } } op.time('limitFields'); } // Now run any projections on the data required if (options.$elemMatch) { op.data('flag.elemMatch', true); op.time('projection-elemMatch'); for (i in options.$elemMatch) { if (options.$elemMatch.hasOwnProperty(i)) { elemMatchPathSolver = new Path(i); // Loop the results array for (j = 0; j < resultArr.length; j++) { elemMatchSubArr = elemMatchPathSolver.value(resultArr[j])[0]; // Check we have a sub-array to loop if (elemMatchSubArr && elemMatchSubArr.length) { // Loop the sub-array and check for projection query matches for (k = 0; k < elemMatchSubArr.length; k++) { // Check if the current item in the sub-array matches the projection query if (self._match(elemMatchSubArr[k], options.$elemMatch[i], options, '', {})) { // The item matches the projection query so set the sub-array // to an array that ONLY contains the matching item and then // exit the loop since we only want to match the first item elemMatchPathSolver.set(resultArr[j], i, [elemMatchSubArr[k]]); break; } } } } } } op.time('projection-elemMatch'); } if (options.$elemsMatch) { op.data('flag.elemsMatch', true); op.time('projection-elemsMatch'); for (i in options.$elemsMatch) { if (options.$elemsMatch.hasOwnProperty(i)) { elemMatchPathSolver = new Path(i); // Loop the results array for (j = 0; j < resultArr.length; j++) { elemMatchSubArr = elemMatchPathSolver.value(resultArr[j])[0]; // Check we have a sub-array to loop if (elemMatchSubArr && elemMatchSubArr.length) { elemMatchSpliceArr = []; // Loop the sub-array and check for projection query matches for (k = 0; k < elemMatchSubArr.length; k++) { // Check if the current item in the sub-array matches the projection query if (self._match(elemMatchSubArr[k], options.$elemsMatch[i], options, '', {})) { // The item matches the projection query so add it to the final array elemMatchSpliceArr.push(elemMatchSubArr[k]); } } // Now set the final sub-array to the matched items elemMatchPathSolver.set(resultArr[j], i, elemMatchSpliceArr); } } } } op.time('projection-elemsMatch'); } } // Process aggregation if (options.$aggregate) { op.data('flag.aggregate', true); op.time('aggregate'); pathSolver = new Path(); resultArr = pathSolver.aggregate(resultArr, options.$aggregate); op.time('aggregate'); } // Now process any $groupBy clause if (options.$groupBy) { op.data('flag.group', true); op.time('group'); resultArr = 
this.group(options.$groupBy, resultArr); op.time('group'); } op.stop(); resultArr.__fdbOp = op; resultArr.$cursor = cursor; return resultArr; }; /** * Returns one document that satisfies the specified query criteria. If multiple * documents satisfy the query, this method returns the first document to match * the query. * @returns {*} */ Collection.prototype.findOne = function () { return (this.find.apply(this, arguments))[0]; }; /** * Gets the index in the collection data array of the first item matched by * the passed query object. * @param {Object} query The query to run to find the item to return the index of. * @param {Object=} options An options object. * @returns {Number} */ Collection.prototype.indexOf = function (query, options) { var item = this.find(query, {$decouple: false})[0], sortedData; if (item) { if (!options || options && !options.$orderBy) { // Basic lookup from order of insert return this._data.indexOf(item); } else { // Trying to locate index based on query with sort order options.$decouple = false; sortedData = this.find(query, options); return sortedData.indexOf(item); } } return -1; }; /** * Returns the index of the document identified by the passed item's primary key. * @param {*} itemLookup The document whose primary key should be used to lookup * or the id to lookup. * @param {Object=} options An options object. * @returns {Number} The index the item with the matching primary key is occupying. */ Collection.prototype.indexOfDocById = function (itemLookup, options) { var item, sortedData; if (typeof itemLookup !== 'object') { item = this._primaryIndex.get(itemLookup); } else { item = this._primaryIndex.get(itemLookup[this._primaryKey]); } if (item) { if (!options || options && !options.$orderBy) { // Basic lookup return this._data.indexOf(item); } else { // Sorted lookup options.$decouple = false; sortedData = this.find({}, options); return sortedData.indexOf(item); } } return -1; }; /** * Removes a document from the collection by it's index in the collection's * data array. * @param {Number} index The index of the document to remove. * @returns {Object} The document that has been removed or false if none was * removed. */ Collection.prototype.removeByIndex = function (index) { var doc, docId; doc = this._data[index]; if (doc !== undefined) { doc = this.decouple(doc); docId = doc[this.primaryKey()]; return this.removeById(docId); } return false; }; /** * Gets / sets the collection transform options. * @param {Object} obj A collection transform options object. * @returns {*} */ Collection.prototype.transform = function (obj) { if (obj !== undefined) { if (typeof obj === "object") { if (obj.enabled !== undefined) { this._transformEnabled = obj.enabled; } if (obj.dataIn !== undefined) { this._transformIn = obj.dataIn; } if (obj.dataOut !== undefined) { this._transformOut = obj.dataOut; } } else { this._transformEnabled = obj !== false; } return this; } return { enabled: this._transformEnabled, dataIn: this._transformIn, dataOut: this._transformOut }; }; /** * Transforms data using the set transformIn method. * @param {Object} data The data to transform. 
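* @example
* // A hedged sketch of wiring up transforms via the transform() method above;
* // the dataIn/dataOut functions are illustrative only:
* coll.transform({
*     enabled: true,
*     dataIn: function (doc) { doc.storedAt = Date.now(); return doc; },
*     dataOut: function (doc) { delete doc.storedAt; return doc; }
* });
* coll.insert({name: 'Jim'}); // dataIn runs via transformIn()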
* @returns {*} */ Collection.prototype.transformIn = function (data) { if (this._transformEnabled && this._transformIn) { if (data instanceof Array) { var finalArr = [], transformResult, i; for (i = 0; i < data.length; i++) { transformResult = this._transformIn(data[i]); // Support transforms returning multiple items if (transformResult instanceof Array) { finalArr = finalArr.concat(transformResult); } else { finalArr.push(transformResult); } } return finalArr; } else { return this._transformIn(data); } } return data; }; /** * Transforms data using the set transformOut method. * @param {Object} data The data to transform. * @returns {*} */ Collection.prototype.transformOut = function (data) { if (this._transformEnabled && this._transformOut) { if (data instanceof Array) { var finalArr = [], transformResult, i; for (i = 0; i < data.length; i++) { transformResult = this._transformOut(data[i]); // Support transforms returning multiple items if (transformResult instanceof Array) { finalArr = finalArr.concat(transformResult); } else { finalArr.push(transformResult); } } return finalArr; } else { return this._transformOut(data); } } return data; }; /** * Sorts an array of documents by the given sort path. * @param {*} sortObj The keys and orders the array objects should be sorted by. * @param {Array} arr The array of documents to sort. * @returns {Array} */ Collection.prototype.sort = function (sortObj, arr) { // Convert the index object to an array of key val objects var self = this, keys = sharedPathSolver.parse(sortObj, true); if (keys.length) { // Execute sort arr.sort(function (a, b) { // Loop the index array var i, indexData, result = 0; for (i = 0; i < keys.length; i++) { indexData = keys[i]; if (indexData.value === 1) { result = self.sortAsc(sharedPathSolver.get(a, indexData.path), sharedPathSolver.get(b, indexData.path)); } else if (indexData.value === -1) { result = self.sortDesc(sharedPathSolver.get(a, indexData.path), sharedPathSolver.get(b, indexData.path)); } if (result !== 0) { return result; } } return result; }); } return arr; }; /** * Groups an array of documents into multiple array fields, named by the value * of the given group path. * @param {*} groupObj The key path the array objects should be grouped by. * @param {Array} arr The array of documents to group. * @returns {Object} */ Collection.prototype.group = function (groupObj, arr) { // Convert the index object to an array of key val objects var keys = sharedPathSolver.parse(groupObj, true), groupPathSolver = new Path(), groupValue, groupResult = {}, keyIndex, i; if (keys.length) { for (keyIndex = 0; keyIndex < keys.length; keyIndex++) { groupPathSolver.path(keys[keyIndex].path); // Execute group for (i = 0; i < arr.length; i++) { groupValue = groupPathSolver.get(arr[i]); groupResult[groupValue] = groupResult[groupValue] || []; groupResult[groupValue].push(arr[i]); } } } return groupResult; }; // Commented as we have a new method that was originally implemented for binary trees. 
// This old method actually has problems with nested sort objects /*Collection.prototype.sortold = function (sortObj, arr) { // Make sure we have an array object arr = arr || []; var sortArr = [], sortKey, sortSingleObj; for (sortKey in sortObj) { if (sortObj.hasOwnProperty(sortKey)) { sortSingleObj = {}; sortSingleObj[sortKey] = sortObj[sortKey]; sortSingleObj.___fdbKey = String(sortKey); sortArr.push(sortSingleObj); } } if (sortArr.length < 2) { // There is only one sort criteria, do a simple sort and return it return this._sort(sortObj, arr); } else { return this._bucketSort(sortArr, arr); } };*/ /** * REMOVED AS SUPERCEDED BY BETTER SORT SYSTEMS * Takes array of sort paths and sorts them into buckets before returning final * array fully sorted by multi-keys. * @param keyArr * @param arr * @returns {*} * @private */ /*Collection.prototype._bucketSort = function (keyArr, arr) { var keyObj = keyArr.shift(), arrCopy, bucketData, bucketOrder, bucketKey, buckets, i, finalArr = []; if (keyArr.length > 0) { // Sort array by bucket key arr = this._sort(keyObj, arr); // Split items into buckets bucketData = this.bucket(keyObj.___fdbKey, arr); bucketOrder = bucketData.order; buckets = bucketData.buckets; // Loop buckets and sort contents for (i = 0; i < bucketOrder.length; i++) { bucketKey = bucketOrder[i]; arrCopy = [].concat(keyArr); finalArr = finalArr.concat(this._bucketSort(arrCopy, buckets[bucketKey])); } return finalArr; } else { return this._sort(keyObj, arr); } };*/ /** * Takes an array of objects and returns a new object with the array items * split into buckets by the passed key. * @param {String} key The key to split the array into buckets by. * @param {Array} arr An array of objects. * @returns {Object} */ /*Collection.prototype.bucket = function (key, arr) { var i, oldField, field, fieldArr = [], buckets = {}; for (i = 0; i < arr.length; i++) { field = String(arr[i][key]); if (oldField !== field) { fieldArr.push(field); oldField = field; } buckets[field] = buckets[field] || []; buckets[field].push(arr[i]); } return { buckets: buckets, order: fieldArr }; };*/ /** * Sorts array by individual sort path. * @param {String} key The path to sort by. * @param {Array} arr The array of objects to sort. * @returns {Array|*} * @private */ Collection.prototype._sort = function (key, arr) { var self = this, sorterMethod, pathSolver = new Path(), dataPath = pathSolver.parse(key, true)[0]; pathSolver.path(dataPath.path); if (dataPath.value === 1) { // Sort ascending sorterMethod = function (a, b) { var valA = pathSolver.value(a)[0], valB = pathSolver.value(b)[0]; return self.sortAsc(valA, valB); }; } else if (dataPath.value === -1) { // Sort descending sorterMethod = function (a, b) { var valA = pathSolver.value(a)[0], valB = pathSolver.value(b)[0]; return self.sortDesc(valA, valB); }; } else { throw(this.logIdentifier() + ' $orderBy clause has invalid direction: ' + dataPath.value + ', accepted values are 1 or -1 for ascending or descending!'); } return arr.sort(sorterMethod); }; /** * Internal method that takes a search query and options and * returns an object containing details about the query which * can be used to optimise the search. * * @param {Object} query The search query to analyse. * @param {Object} options The query options object. * @param {Operation} op The instance of the Operation class that * this operation is using to track things like performance and steps * taken etc. 
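* @example
* // Illustrative only: with an index on "name", a query keyed on "name"
* // should register as an index match in this analysis; the explain()
* // helper defined earlier can be used to inspect the recorded operation data.
* coll.ensureIndex({name: 1});
* coll.find({name: 'Jim'});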
* @returns {Object} * @private */ Collection.prototype._analyseQuery = function (query, options, op) { var analysis = { queriesOn: [{id: '$collection.' + this._name, type: 'colletion', key: this._name}], indexMatch: [], hasJoin: false, queriesJoin: false, joinQueries: {}, query: query, options: options }, joinSourceIndex, joinSourceKey, joinSourceType, joinSourceIdentifier, joinMatch, joinSources = [], joinSourceReferences = [], queryPath, index, indexMatchData, indexRef, indexRefName, indexLookup, pathSolver, queryKeyCount, pkQueryType, lookupResult, i; // Check if the query is a primary key lookup op.time('checkIndexes'); pathSolver = new Path(); queryKeyCount = pathSolver.parseArr(query, { ignore:/\$/, verbose: true }).length; if (queryKeyCount) { if (query[this._primaryKey] !== undefined) { // Check suitability of querying key value index pkQueryType = typeof query[this._primaryKey]; if (pkQueryType === 'string' || pkQueryType === 'number' || query[this._primaryKey] instanceof Array) { // Return item via primary key possible op.time('checkIndexMatch: Primary Key'); lookupResult = [].concat(this._primaryIndex.lookup(query, options, op)); analysis.indexMatch.push({ lookup: lookupResult, keyData: { matchedKeys: [this._primaryKey], totalKeyCount: queryKeyCount, score: 1 }, index: this._primaryIndex }); op.time('checkIndexMatch: Primary Key'); } } // Check if an index can speed up the query for (i in this._indexById) { if (this._indexById.hasOwnProperty(i)) { indexRef = this._indexById[i]; indexRefName = indexRef.name(); op.time('checkIndexMatch: ' + indexRefName); indexMatchData = indexRef.match(query, options); if (indexMatchData.score > 0) { // This index can be used, store it indexLookup = [].concat(indexRef.lookup(query, options, op)); analysis.indexMatch.push({ lookup: indexLookup, keyData: indexMatchData, index: indexRef }); } op.time('checkIndexMatch: ' + indexRefName); if (indexMatchData.score === queryKeyCount) { // Found an optimal index, do not check for any more break; } } } op.time('checkIndexes'); // Sort array descending on index key count (effectively a measure of relevance to the query) if (analysis.indexMatch.length > 1) { op.time('findOptimalIndex'); analysis.indexMatch.sort(function (a, b) { if (a.keyData.score > b.keyData.score) { // This index has a higher score than the other return -1; } if (a.keyData.score < b.keyData.score) { // This index has a lower score than the other return 1; } // The indexes have the same score but can still be compared by the number of records // they return from the query. The fewer records they return the better so order by // record count if (a.keyData.score === b.keyData.score) { return a.lookup.length - b.lookup.length; } }); op.time('findOptimalIndex'); } } // Check for join data if (options.$join) { analysis.hasJoin = true; // Loop all join operations for (joinSourceIndex = 0; joinSourceIndex < options.$join.length; joinSourceIndex++) { // Loop the join sources and keep a reference to them for (joinSourceKey in options.$join[joinSourceIndex]) { if (options.$join[joinSourceIndex].hasOwnProperty(joinSourceKey)) { joinMatch = options.$join[joinSourceIndex][joinSourceKey]; joinSourceType = joinMatch.$sourceType || 'collection'; joinSourceIdentifier = '$' + joinSourceType + '.' 
+ joinSourceKey; joinSources.push({ id: joinSourceIdentifier, type: joinSourceType, key: joinSourceKey }); // Check if the join uses an $as operator if (options.$join[joinSourceIndex][joinSourceKey].$as !== undefined) { joinSourceReferences.push(options.$join[joinSourceIndex][joinSourceKey].$as); } else { joinSourceReferences.push(joinSourceKey); } } } } // Loop the join source references and determine if the query references // any of the sources that are used in the join. If there no queries against // joined sources the find method can use a code path optimised for this. // Queries against joined sources requires the joined sources to be filtered // first and then joined so requires a little more work. for (index = 0; index < joinSourceReferences.length; index++) { // Check if the query references any source data that the join will create queryPath = this._queryReferencesSource(query, joinSourceReferences[index], ''); if (queryPath) { analysis.joinQueries[joinSources[index].key] = queryPath; analysis.queriesJoin = true; } } analysis.joinsOn = joinSources; analysis.queriesOn = analysis.queriesOn.concat(joinSources); } return analysis; }; /** * Checks if the passed query references a source object (such * as a collection) by name. * @param {Object} query The query object to scan. * @param {String} sourceName The source name to scan for in the query. * @param {String=} path The path to scan from. * @returns {*} * @private */ Collection.prototype._queryReferencesSource = function (query, sourceName, path) { var i; for (i in query) { if (query.hasOwnProperty(i)) { // Check if this key is a reference match if (i === sourceName) { if (path) { path += '.'; } return path + i; } else { if (typeof(query[i]) === 'object') { // Recurse if (path) { path += '.'; } path += i; return this._queryReferencesSource(query[i], sourceName, path); } } } } return false; }; /** * Returns the number of documents currently in the collection. * @returns {Number} */ Collection.prototype.count = function (query, options) { if (!query) { return this._data.length; } else { // Run query and return count return this.find(query, options).length; } }; /** * Finds sub-documents from the collection's documents. * @param {Object} match The query object to use when matching * parent documents from which the sub-documents are queried. * @param {String} path The path string used to identify the * key in which sub-documents are stored in parent documents. * @param {Object=} subDocQuery The query to use when matching * which sub-documents to return. * @param {Object=} subDocOptions The options object to use * when querying for sub-documents. 
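* @example
* // A hedged sketch; the document shape, path and queries are illustrative:
* coll.findSub({type: 'book'}, 'chapters', {pageCount: {$gt: 10}}, {$stats: true});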
* @returns {*} */ Collection.prototype.findSub = function (match, path, subDocQuery, subDocOptions) { return this._findSub(this.find(match), path, subDocQuery, subDocOptions); }; Collection.prototype._findSub = function (docArr, path, subDocQuery, subDocOptions) { var pathHandler = new Path(path), docCount = docArr.length, docIndex, subDocArr, subDocCollection = new Collection('__FDB_temp_' + this.objectId()).db(this._db), subDocResults, resultObj = { parents: docCount, subDocTotal: 0, subDocs: [], pathFound: false, err: '' }; subDocOptions = subDocOptions || {}; for (docIndex = 0; docIndex < docCount; docIndex++) { subDocArr = pathHandler.value(docArr[docIndex])[0]; if (subDocArr) { subDocCollection.setData(subDocArr); subDocResults = subDocCollection.find(subDocQuery, subDocOptions); if (subDocOptions.$returnFirst && subDocResults.length) { return subDocResults[0]; } if (subDocOptions.$split) { resultObj.subDocs.push(subDocResults); } else { resultObj.subDocs = resultObj.subDocs.concat(subDocResults); } resultObj.subDocTotal += subDocResults.length; resultObj.pathFound = true; } } // Drop the sub-document collection subDocCollection.drop(); if (!resultObj.pathFound) { resultObj.err = 'No objects found in the parent documents with a matching path of: ' + path; } // Check if the call should not return stats, if so return only subDocs array if (subDocOptions.$stats) { return resultObj; } else { return resultObj.subDocs; } }; /** * Finds the first sub-document from the collection's documents * that matches the subDocQuery parameter. * @param {Object} match The query object to use when matching * parent documents from which the sub-documents are queried. * @param {String} path The path string used to identify the * key in which sub-documents are stored in parent documents. * @param {Object=} subDocQuery The query to use when matching * which sub-documents to return. * @param {Object=} subDocOptions The options object to use * when querying for sub-documents. * @returns {Object} */ Collection.prototype.findSubOne = function (match, path, subDocQuery, subDocOptions) { return this.findSub(match, path, subDocQuery, subDocOptions)[0]; }; /** * Checks that the passed document will not violate any index rules if * inserted into the collection. * @param {Object} doc The document to check indexes against. * @returns {Boolean} Either false (no violation occurred) or true if * a violation was detected. */ Collection.prototype.insertIndexViolation = function (doc) { var indexViolated, arr = this._indexByName, arrIndex, arrItem; // Check the item's primary key is not already in use if (this._primaryIndex.get(doc[this._primaryKey])) { indexViolated = this._primaryIndex; } else { // Check violations of other indexes for (arrIndex in arr) { if (arr.hasOwnProperty(arrIndex)) { arrItem = arr[arrIndex]; if (arrItem.unique()) { if (arrItem.violation(doc)) { indexViolated = arrItem; break; } } } } } return indexViolated ? indexViolated.name() : false; }; /** * Creates an index on the specified keys. * @param {Object} keys The object containing keys to index. * @param {Object} options An options object. 
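* @example
* // A hedged sketch; the key and options shown are illustrative:
* var result = coll.ensureIndex({email: 1}, {unique: true});
* console.log(result.name, result.state);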
* @returns {*} */ Collection.prototype.ensureIndex = function (keys, options) { if (this.isDropped()) { throw(this.logIdentifier() + ' Cannot operate in a dropped state!'); } this._indexByName = this._indexByName || {}; this._indexById = this._indexById || {}; var index, time = { start: new Date().getTime() }; if (options) { if (options.type) { // Check if the specified type is available if (Shared.index[options.type]) { // We found the type, generate it index = new Shared.index[options.type](keys, options, this); } else { throw(this.logIdentifier() + ' Cannot create index of type "' + options.type + '", type not found in the index type register (Shared.index)'); } } else { // Create default index type index = new IndexHashMap(keys, options, this); } } else { // Default index = new IndexHashMap(keys, options, this); } // Check the index does not already exist if (this._indexByName[index.name()]) { // Index already exists return { err: 'Index with that name already exists' }; } /*if (this._indexById[index.id()]) { // Index already exists return { err: 'Index with those keys already exists' }; }*/ // Create the index index.rebuild(); // Add the index this._indexByName[index.name()] = index; this._indexById[index.id()] = index; time.end = new Date().getTime(); time.total = time.end - time.start; this._lastOp = { type: 'ensureIndex', stats: { time: time } }; return { index: index, id: index.id(), name: index.name(), state: index.state() }; }; /** * Gets an index by it's name. * @param {String} name The name of the index to retreive. * @returns {*} */ Collection.prototype.index = function (name) { if (this._indexByName) { return this._indexByName[name]; } }; /** * Gets the last reporting operation's details such as run time. * @returns {Object} */ Collection.prototype.lastOp = function () { return this._metrics.list(); }; /** * Generates a difference object that contains insert, update and remove arrays * representing the operations to execute to make this collection have the same * data as the one passed. * @param {Collection} collection The collection to diff against. 
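* @example
* // A hedged sketch; collA and collB are illustrative and must share the
* // same primary key:
* var delta = collA.diff(collB);
* console.log(delta.insert.length, delta.update.length, delta.remove.length);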
* @returns {{}} */ Collection.prototype.diff = function (collection) { var diff = { insert: [], update: [], remove: [] }; var pk = this.primaryKey(), arr, arrIndex, arrItem, arrCount; // Check if the primary key index of each collection can be utilised if (pk !== collection.primaryKey()) { throw(this.logIdentifier() + ' Diffing requires that both collections have the same primary key!'); } // Use the collection primary key index to do the diff (super-fast) arr = collection._data; // Check if we have an array or another collection while (arr && !(arr instanceof Array)) { // We don't have an array, assign collection and get data collection = arr; arr = collection._data; } arrCount = arr.length; // Loop the collection's data array and check for matching items for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { arrItem = arr[arrIndex]; // Check for a matching item in this collection if (this._primaryIndex.get(arrItem[pk])) { // Matching item exists, check if the data is the same if (this._primaryCrc.get(arrItem[pk]) !== collection._primaryCrc.get(arrItem[pk])) { // The documents exist in both collections but data differs, update required diff.update.push(arrItem); } } else { // The document is missing from this collection, insert required diff.insert.push(arrItem); } } // Now loop this collection's data and check for matching items arr = this._data; arrCount = arr.length; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { arrItem = arr[arrIndex]; if (!collection._primaryIndex.get(arrItem[pk])) { // The document does not exist in the other collection, remove required diff.remove.push(arrItem); } } return diff; }; Collection.prototype.collateAdd = new Overload('Collection.prototype.collateAdd', { /** * Adds a data source to collate data from and specifies the * key name to collate data to. * @func collateAdd * @memberof Collection * @param {Collection} collection The collection to collate data from. * @param {String=} keyName Optional name of the key to collate data to. * If none is provided the record CRUD is operated on the root collection * data. */ 'object, string': function (collection, keyName) { var self = this; self.collateAdd(collection, function (packet) { var obj1, obj2; switch (packet.type) { case 'insert': if (keyName) { obj1 = { $push: {} }; obj1.$push[keyName] = self.decouple(packet.data.dataSet); self.update({}, obj1); } else { self.insert(packet.data.dataSet); } break; case 'update': if (keyName) { obj1 = {}; obj2 = {}; obj1[keyName] = packet.data.query; obj2[keyName + '.$'] = packet.data.update; self.update(obj1, obj2); } else { self.update(packet.data.query, packet.data.update); } break; case 'remove': if (keyName) { obj1 = { $pull: {} }; obj1.$pull[keyName] = {}; obj1.$pull[keyName][self.primaryKey()] = packet.data.dataSet[0][collection.primaryKey()]; self.update({}, obj1); } else { self.remove(packet.data.dataSet); } break; default: } }); }, /** * Adds a data source to collate data from and specifies a process * method that will handle the collation functionality (for custom * collation). * @func collateAdd * @memberof Collection * @param {Collection} collection The collection to collate data from. * @param {Function} process The process method. 
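* @example
* // A hedged sketch of a custom collation handler; "target" and "source" are
* // illustrative and the packet handling mirrors the insert case in the
* // overload above:
* target.collateAdd(source, function (packet) {
*     if (packet.type === 'insert') { target.insert(packet.data.dataSet); }
* });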
*/ 'object, function': function (collection, process) { if (typeof collection === 'string') { // The collection passed is a name, not a reference so get // the reference from the name collection = this._db.collection(collection, { autoCreate: false, throwError: false }); } if (collection) { this._collate = this._collate || {}; this._collate[collection.name()] = new ReactorIO(collection, this, process); return this; } else { throw('Cannot collate from a non-existent collection!'); } } }); Collection.prototype.collateRemove = function (collection) { if (typeof collection === 'object') { // We need to have the name of the collection to remove it collection = collection.name(); } if (collection) { // Drop the reactor IO chain node this._collate[collection].drop(); // Remove the collection data from the collate object delete this._collate[collection]; return this; } else { throw('No collection name passed to collateRemove() or collection not found!'); } }; /** * Creates a condition handler that will react to changes in data on the * collection. * @example Create a condition handler that reacts when data changes. * var coll = db.collection('test'), * condition = coll.when({_id: 'test1', val: 1}) * .then(function () { * console.log('Condition met!'); * }) * .else(function () { * console.log('Condition un-met'); * }); * * coll.insert({_id: 'test1', val: 1}); * * @see Condition * @param {Object} query The query that will trigger the condition's then() * callback. * @returns {Condition} */ Collection.prototype.when = function (query) { var queryId = this.objectId(); this._when = this._when || {}; this._when[queryId] = this._when[queryId] || new Condition(this, queryId, query); return this._when[queryId]; }; Db.prototype.collection = new Overload('Db.prototype.collection', { /** * Get a collection with no name (generates a random name). If the * collection does not already exist then one is created for that * name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @returns {Collection} */ '': function () { return this.$main.call(this, { name: this.objectId() }); }, /** * Get a collection by name. If the collection does not already exist * then one is created for that name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @param {Object} data An options object or a collection instance. * @returns {Collection} */ 'object': function (data) { // Handle being passed an instance if (data instanceof Collection) { if (data.state() !== 'dropped') { return data; } else { return this.$main.call(this, { name: data.name() }); } } return this.$main.call(this, data); }, /** * Get a collection by name. If the collection does not already exist * then one is created for that name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @param {String} collectionName The name of the collection. * @returns {Collection} */ 'string': function (collectionName) { return this.$main.call(this, { name: collectionName }); }, /** * Get a collection by name. If the collection does not already exist * then one is created for that name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @param {String} collectionName The name of the collection. * @param {String} primaryKey Optional primary key to specify the * primary key field on the collection objects. Defaults to "_id".
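 * @example Usage sketch (assumes `db` is an existing Db instance; the
 * collection name and primary key below are hypothetical).
 * var users = db.collection('users', 'userId');
 *
 * users.insert({userId: '1', name: 'Jim'});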
* @returns {Collection} */ 'string, string': function (collectionName, primaryKey) { return this.$main.call(this, { name: collectionName, primaryKey: primaryKey }); }, /** * Get a collection by name. If the collection does not already exist * then one is created for that name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @param {String} collectionName The name of the collection. * @param {Object} options An options object. * @returns {Collection} */ 'string, object': function (collectionName, options) { options.name = collectionName; return this.$main.call(this, options); }, /** * Get a collection by name. If the collection does not already exist * then one is created for that name automatically. * @name collection * @method Db.collection * @func collection * @memberof Db * @param {String} collectionName The name of the collection. * @param {String} primaryKey Optional primary key to specify the * primary key field on the collection objects. Defaults to "_id". * @param {Object} options An options object. * @returns {Collection} */ 'string, string, object': function (collectionName, primaryKey, options) { options.name = collectionName; options.primaryKey = primaryKey; return this.$main.call(this, options); }, '$main': function (options) { var self = this, name = options.name; if (!name) { if (!options || (options && options.throwError !== false)) { throw(this.logIdentifier() + ' Cannot get collection with undefined name!'); } return; } if (this._collection[name]) { return this._collection[name]; } if (options && options.autoCreate === false) { if (options && options.throwError !== false) { throw(this.logIdentifier() + ' Cannot get collection ' + name + ' because it does not exist and auto-create has been disabled!'); } return undefined; } if (this.debug()) { console.log(this.logIdentifier() + ' Creating collection ' + name); } this._collection[name] = this._collection[name] || new Collection(name, options).db(this); this._collection[name].mongoEmulation(this.mongoEmulation()); if (options.primaryKey !== undefined) { this._collection[name].primaryKey(options.primaryKey); } if (options.capped !== undefined) { // Check we have a size if (options.size !== undefined) { this._collection[name].capped(options.capped); this._collection[name].cappedSize(options.size); } else { throw(this.logIdentifier() + ' Cannot create a capped collection without specifying a size!'); } } // Listen for events on this collection so we can fire global events // on the database in response to it self._collection[name].on('change', function () { self.emit('change', self._collection[name], 'collection', name); }); self.deferEmit('create', self._collection[name], 'collection', name); return this._collection[name]; } }); /** * Determine if a collection with the passed name already exists. * @memberof Db * @param {String} viewName The name of the collection to check for. * @returns {boolean} */ Db.prototype.collectionExists = function (viewName) { return Boolean(this._collection[viewName]); }; /** * Returns an array of collections the DB currently has. * @memberof Db * @param {String|RegExp=} search The optional search string or * regular expression to use to match collection names against. * @returns {Array} An array of objects containing details of each * collection the database is currently managing. 
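 * @example Usage sketch (assumes `db` already manages some collections).
 * // Details of every collection
 * var all = db.collections();
 *
 * // Only collections whose names match the expression
 * var userColls = db.collections('^user');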
*/ Db.prototype.collections = function (search) { var arr = [], collections = this._collection, collection, i; if (search) { if (!(search instanceof RegExp)) { // Turn the search into a regular expression search = new RegExp(search); } } for (i in collections) { if (collections.hasOwnProperty(i)) { collection = collections[i]; if (search) { if (search.exec(i)) { arr.push({ name: i, count: collection.count(), linked: collection.isLinked !== undefined ? collection.isLinked() : false }); } } else { arr.push({ name: i, count: collection.count(), linked: collection.isLinked !== undefined ? collection.isLinked() : false }); } } } arr.sort(function (a, b) { return a.name.localeCompare(b.name); }); return arr; }; Shared.finishModule('Collection'); module.exports = Collection; },{"./Condition":8,"./Index2d":15,"./IndexBinaryTree":16,"./IndexHashMap":17,"./KeyValueStore":18,"./Metrics":19,"./Overload":32,"./Path":34,"./ReactorIO":38,"./Shared":40}],7:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared, Db, DbInit, Collection; Shared = _dereq_('./Shared'); /** * Creates a new collection group. Collection groups allow single operations to be * propagated to multiple collections at once. CRUD operations against a collection * group are in fed to the group's collections. Useful when separating out slightly * different data into multiple collections but querying as one collection. * @constructor */ var CollectionGroup = function () { this.init.apply(this, arguments); }; CollectionGroup.prototype.init = function (name) { var self = this; self._name = name; self._data = new Collection('__FDB__cg_data_' + self._name); self._collections = []; self._view = []; }; Shared.addModule('CollectionGroup', CollectionGroup); Shared.mixin(CollectionGroup.prototype, 'Mixin.Common'); Shared.mixin(CollectionGroup.prototype, 'Mixin.ChainReactor'); Shared.mixin(CollectionGroup.prototype, 'Mixin.Constants'); Shared.mixin(CollectionGroup.prototype, 'Mixin.Triggers'); Shared.mixin(CollectionGroup.prototype, 'Mixin.Tags'); Shared.mixin(CollectionGroup.prototype, 'Mixin.Events'); Collection = _dereq_('./Collection'); Db = Shared.modules.Db; DbInit = Shared.modules.Db.prototype.init; CollectionGroup.prototype.on = function () { this._data.on.apply(this._data, arguments); }; CollectionGroup.prototype.off = function () { this._data.off.apply(this._data, arguments); }; CollectionGroup.prototype.emit = function () { this._data.emit.apply(this._data, arguments); }; /** * Gets / sets the primary key for this collection group. * @param {String=} keyName The name of the primary key. * @returns {*} */ CollectionGroup.prototype.primaryKey = function (keyName) { if (keyName !== undefined) { this._primaryKey = keyName; return this; } return this._primaryKey; }; /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(CollectionGroup.prototype, 'state'); /** * Gets / sets the db instance the collection group belongs to. * @param {Db=} db The db instance. * @returns {*} */ Shared.synthesize(CollectionGroup.prototype, 'db'); /** * Gets / sets the instance name. * @param {String=} name The new name to set. 
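 * @example Illustrative sketch (assumes `db` is an existing Db instance;
 * the group name is hypothetical).
 * var group = db.collectionGroup('stats');
 *
 * group.name(); // Returns "stats"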
* @returns {*} */ Shared.synthesize(CollectionGroup.prototype, 'name'); CollectionGroup.prototype.addCollection = function (collection) { if (collection) { if (this._collections.indexOf(collection) === -1) { //var self = this; // Check for compatible primary keys if (this._collections.length) { if (this._primaryKey !== collection.primaryKey()) { throw(this.logIdentifier() + ' All collections in a collection group must have the same primary key!'); } } else { // Set the primary key to the first collection added this.primaryKey(collection.primaryKey()); } // Add the collection this._collections.push(collection); collection._groups = collection._groups || []; collection._groups.push(this); collection.chain(this); // Hook the collection's drop event to destroy group data collection.on('drop', function () { // Remove collection from any group associations if (collection._groups && collection._groups.length) { var groupArr = [], i; // Copy the group array because if we call removeCollection on a group // it will alter the groups array of this collection mid-loop! for (i = 0; i < collection._groups.length; i++) { groupArr.push(collection._groups[i]); } // Loop any groups we are part of and remove ourselves from them for (i = 0; i < groupArr.length; i++) { groupArr[i].removeCollection(collection); } } delete collection._groups; }); // Add collection's data this._data.insert(collection.find()); } } return this; }; CollectionGroup.prototype.removeCollection = function (collection) { if (collection) { var collectionIndex = this._collections.indexOf(collection), groupIndex; if (collectionIndex !== -1) { collection.unChain(this); this._collections.splice(collectionIndex, 1); collection._groups = collection._groups || []; groupIndex = collection._groups.indexOf(this); if (groupIndex !== -1) { collection._groups.splice(groupIndex, 1); } collection.off('drop'); } if (this._collections.length === 0) { // Wipe the primary key delete this._primaryKey; } } return this; }; CollectionGroup.prototype._chainHandler = function (chainPacket) { //sender = chainPacket.sender; switch (chainPacket.type) { case 'setData': // Decouple the data to ensure we are working with our own copy chainPacket.data.dataSet = this.decouple(chainPacket.data.dataSet); // Remove old data this._data.remove(chainPacket.data.oldData); // Add new data this._data.insert(chainPacket.data.dataSet); break; case 'insert': // Decouple the data to ensure we are working with our own copy chainPacket.data.dataSet = this.decouple(chainPacket.data.dataSet); // Add new data this._data.insert(chainPacket.data.dataSet); break; case 'update': // Update data this._data.update(chainPacket.data.query, chainPacket.data.update, chainPacket.options); break; case 'remove': this._data.remove(chainPacket.data.query, chainPacket.options); break; default: break; } }; CollectionGroup.prototype.insert = function () { this._collectionsRun('insert', arguments); }; CollectionGroup.prototype.update = function () { this._collectionsRun('update', arguments); }; CollectionGroup.prototype.updateById = function () { this._collectionsRun('updateById', arguments); }; CollectionGroup.prototype.remove = function () { this._collectionsRun('remove', arguments); }; CollectionGroup.prototype._collectionsRun = function (type, args) { for (var i = 0; i < this._collections.length; i++) { this._collections[i][type].apply(this._collections[i], args); } }; CollectionGroup.prototype.find = function (query, options) { return this._data.find(query, options); }; /** * Helper method
that removes a document that matches the given id. * @param {String} id The id of the document to remove. */ CollectionGroup.prototype.removeById = function (id) { // Loop the collections in this group and apply the remove for (var i = 0; i < this._collections.length; i++) { this._collections[i].removeById(id); } }; /** * Uses the passed query to generate a new collection with results * matching the query parameters. * * @param query * @param options * @returns {*} */ CollectionGroup.prototype.subset = function (query, options) { var result = this.find(query, options); return new Collection() .subsetOf(this) .primaryKey(this._primaryKey) .setData(result); }; /** * Drops a collection group from the database. * @returns {boolean} True on success, false on failure. */ CollectionGroup.prototype.drop = function (callback) { if (!this.isDropped()) { var i, collArr, viewArr; if (this._debug) { console.log(this.logIdentifier() + ' Dropping'); } this._state = 'dropped'; if (this._collections && this._collections.length) { collArr = [].concat(this._collections); for (i = 0; i < collArr.length; i++) { this.removeCollection(collArr[i]); } } if (this._view && this._view.length) { viewArr = [].concat(this._view); for (i = 0; i < viewArr.length; i++) { this._removeView(viewArr[i]); } } this.emit('drop', this); delete this._listeners; if (callback) { callback(false, true); } } return true; }; // Extend DB to include collection groups Db.prototype.init = function () { this._collectionGroup = {}; DbInit.apply(this, arguments); }; /** * Creates a new collectionGroup instance or returns an existing * instance if one already exists with the passed name. * @func collectionGroup * @memberOf Db * @param {String} name The name of the instance. * @returns {*} */ Db.prototype.collectionGroup = function (name) { var self = this; if (name) { // Handle being passed an instance if (name instanceof CollectionGroup) { return name; } if (this._collectionGroup && this._collectionGroup[name]) { return this._collectionGroup[name]; } this._collectionGroup[name] = new CollectionGroup(name).db(this); self.deferEmit('create', self._collectionGroup[name], 'collectionGroup', name); return this._collectionGroup[name]; } else { // Return an object of collection data return this._collectionGroup; } }; /** * Returns an array of collection groups the DB currently has. * @returns {Array} An array of objects containing details of each collection group * the database is currently managing. */ Db.prototype.collectionGroups = function () { var arr = [], i; for (i in this._collectionGroup) { if (this._collectionGroup.hasOwnProperty(i)) { arr.push({ name: i }); } } return arr; }; module.exports = CollectionGroup; },{"./Collection":6,"./Shared":40}],8:[function(_dereq_,module,exports){ "use strict"; var //Overload = require('./Overload'), Shared, Condition; Shared = _dereq_('./Shared'); /** * The condition class monitors a data source and updates it's internal * state depending on clauses that it has been given. When all clauses * are satisfied the then() callback is fired. If conditions were met * but data changed that made them un-met, the else() callback is fired. * @class * @constructor */ Condition = function () { this.init.apply(this, arguments); }; /** * Class constructor calls this init method. * This allows the constructor to be overridden by other modules because * they can override the init method with their own. * @param {Collection|View} dataSource The condition's data source. 
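 * @example Usage sketch (illustrative only; a Condition is normally obtained
 * via a collection's when() method rather than constructed directly, and the
 * collection name and clauses below are hypothetical).
 * var coll = db.collection('orders');
 *
 * coll.when({status: 'paid'})
 *     .and({dispatched: true})
 *     .then(function () { console.log('All clauses satisfied'); })
 *     .else(function () { console.log('Clauses no longer satisfied'); })
 *     .start();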
* @param {String} id The id to assign to the new Condition. * @param {Object} clause The query clause. */ Condition.prototype.init = function (dataSource, id, clause) { this._dataSource = dataSource; this._id = id; this._query = [clause]; this._started = false; this._state = [false]; this._satisfied = false; // Set this to true by default for faster performance this.earlyExit(true); }; // Tell ForerunnerDB about our new module Shared.addModule('Condition', Condition); // Mixin some commonly used methods Shared.mixin(Condition.prototype, 'Mixin.Common'); Shared.mixin(Condition.prototype, 'Mixin.ChainReactor'); Shared.synthesize(Condition.prototype, 'id'); Shared.synthesize(Condition.prototype, 'then'); Shared.synthesize(Condition.prototype, 'else'); Shared.synthesize(Condition.prototype, 'earlyExit'); Shared.synthesize(Condition.prototype, 'debug'); /** * Adds a new clause to the condition. * @param {Object} clause The query clause to add to the condition. * @returns {Condition} */ Condition.prototype.and = function (clause) { this._query.push(clause); this._state.push(false); return this; }; /** * Starts the condition so that changes to data will call callback * methods according to clauses being met. * @param {*} initialState Initial state of condition. * @returns {Condition} */ Condition.prototype.start = function (initialState) { if (!this._started) { var self = this; if (arguments.length !== 0) { this._satisfied = initialState; } // Resolve the current state this._updateStates(); self._onChange = function () { self._updateStates(); }; // Create a chain reactor link to the data source so we start receiving CRUD ops from it this._dataSource.on('change', self._onChange); this._started = true; } return this; }; /** * Updates the internal status of all the clauses against the underlying * data source. * @private */ Condition.prototype._updateStates = function () { var satisfied = true, i; for (i = 0; i < this._query.length; i++) { this._state[i] = this._dataSource.count(this._query[i]) > 0; if (this._debug) { console.log(this.logIdentifier() + ' Evaluating', this._query[i], '=', this._query[i]); } if (!this._state[i]) { satisfied = false; // Early exit since we have found a state that is not true if (this._earlyExit) { break; } } } if (this._satisfied !== satisfied) { // Our state has changed, fire the relevant operation if (satisfied) { // Fire the "then" operation if (this._then) { this._then(); } } else { // Fire the "else" operation if (this._else) { this._else(); } } this._satisfied = satisfied; } }; /** * Stops the condition so that callbacks will no longer fire. * @returns {Condition} */ Condition.prototype.stop = function () { if (this._started) { this._dataSource.off('change', this._onChange); delete this._onChange; this._started = false; } return this; }; /** * Drops the condition and removes it from memory. 
* @returns {Condition} */ Condition.prototype.drop = function () { this.stop(); delete this._dataSource._when[this._id]; return this; }; // Tell ForerunnerDB that our module has finished loading Shared.finishModule('Condition'); module.exports = Condition; },{"./Shared":40}],9:[function(_dereq_,module,exports){ /* License Copyright (c) 2015 Irrelon Software Limited http://www.irrelon.com http://www.forerunnerdb.com Please visit the license page to see latest license information: http://www.forerunnerdb.com/licensing.html */ "use strict"; var Shared, Db, Metrics, Overload, _instances = []; Shared = _dereq_('./Shared'); Overload = _dereq_('./Overload'); /** * Creates a new ForerunnerDB instance. Core instances handle the lifecycle of * multiple database instances. * @constructor */ var Core = function (val) { this.init.apply(this, arguments); }; Core.prototype.init = function (name) { this._db = {}; this._debug = {}; this._name = name || 'ForerunnerDB'; _instances.push(this); }; /** * Returns the number of instantiated ForerunnerDB objects. * @returns {Number} The number of instantiated instances. */ Core.prototype.instantiatedCount = function () { return _instances.length; }; /** * Get all instances as an array or a single ForerunnerDB instance * by its array index. * @param {Number=} index Optional index of instance to get. * @returns {Array|Object} Array of instances or a single instance. */ Core.prototype.instances = function (index) { if (index !== undefined) { return _instances[index]; } return _instances; }; /** * Get all instances as an array of instance names or a single ForerunnerDB * instance by its name. * @param {String=} name Optional name of instance to get. * @returns {Array|Object} Array of instance names or a single instance. */ Core.prototype.namedInstances = function (name) { var i, instArr; if (name !== undefined) { for (i = 0; i < _instances.length; i++) { if (_instances[i].name() === name) { return _instances[i]; } } return undefined; } instArr = []; for (i = 0; i < _instances.length; i++) { instArr.push(_instances[i].name()); } return instArr; }; Core.prototype.moduleLoaded = new Overload({ /** * Checks if a module has been loaded into the database. * @func moduleLoaded * @memberof Core * @param {String} moduleName The name of the module to check for. * @returns {Boolean} True if the module is loaded, false if not. */ 'string': function (moduleName) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { return false; } } return true; } return false; }, /** * Checks if a module is loaded and if so calls the passed * callback method. * @func moduleLoaded * @memberof Core * @param {String} moduleName The name of the module to check for. * @param {Function} callback The callback method to call if module is loaded. */ 'string, function': function (moduleName, callback) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { return false; } } if (callback) { callback(); } } }, /** * Checks if an array of named modules are loaded and if so * calls the passed callback method. * @func moduleLoaded * @memberof Core * @param {Array} moduleNameArr The array of module names to check for. * @param {Function} callback The callback method to call if modules are loaded.
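 * @example Usage sketch (assumes `fdb` is an instantiated ForerunnerDB core
 * instance; the module names are only examples).
 * fdb.moduleLoaded(['Collection', 'View'], function () {
 *     console.log('Both the Collection and View modules are loaded');
 * });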
*/ 'array, function': function (moduleNameArr, callback) { var moduleName, i; for (i = 0; i < moduleNameArr.length; i++) { moduleName = moduleNameArr[i]; if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { return false; } } } } if (callback) { callback(); } }, /** * Checks if a module is loaded and if so calls the passed * success method, otherwise calls the failure method. * @func moduleLoaded * @memberof Core * @param {String} moduleName The name of the module to check for. * @param {Function} success The callback method to call if module is loaded. * @param {Function} failure The callback method to call if module not loaded. */ 'string, function, function': function (moduleName, success, failure) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { failure(); return false; } } success(); } } }); /** * Checks version against the string passed and if it matches (or partially matches) * then the callback is called. * @param {String} val The version to check against. * @param {Function} callback The callback to call if match is true. * @returns {Boolean} */ Core.prototype.version = function (val, callback) { if (val !== undefined) { if (Shared.version.indexOf(val) === 0) { if (callback) { callback(); } return true; } return false; } return Shared.version; }; // Expose moduleLoaded() method to non-instantiated object ForerunnerDB Core.moduleLoaded = Core.prototype.moduleLoaded; // Expose version() method to non-instantiated object ForerunnerDB Core.version = Core.prototype.version; // Expose instances() method to non-instantiated object ForerunnerDB Core.instances = Core.prototype.instances; // Expose instantiatedCount() method to non-instantiated object ForerunnerDB Core.instantiatedCount = Core.prototype.instantiatedCount; // Provide public access to the Shared object Core.shared = Shared; Core.prototype.shared = Shared; Shared.addModule('Core', Core); Shared.mixin(Core.prototype, 'Mixin.Common'); Shared.mixin(Core.prototype, 'Mixin.Constants'); Db = _dereq_('./Db.js'); Metrics = _dereq_('./Metrics.js'); /** * Gets / sets the name of the instance. This is primarily used for * name-spacing persistent storage. * @param {String=} val The name of the instance to set. * @returns {*} */ Shared.synthesize(Core.prototype, 'name'); /** * Gets / sets mongodb emulation mode. * @param {Boolean=} val True to enable, false to disable. * @returns {*} */ Shared.synthesize(Core.prototype, 'mongoEmulation'); // Set a flag to determine environment Core.prototype._isServer = false; /** * Checks if the database is running on a client (browser) or * a server (node.js). * @returns {Boolean} Returns true if running on a browser. */ Core.prototype.isClient = function () { return !this._isServer; }; /** * Checks if the database is running on a client (browser) or * a server (node.js). * @returns {Boolean} Returns true if running on a server. */ Core.prototype.isServer = function () { return this._isServer; }; /** * Added to provide an error message for users who have not seen * the new instantiation breaking change warning and try to get * a collection directly from the core instance. 
*/ Core.prototype.collection = function () { throw("ForerunnerDB's instantiation has changed since version 1.3.36 to support multiple database instances. Please see the readme.md file for the minor change you have to make to get your project back up and running, or see the issue related to this change at https://github.com/Irrelon/ForerunnerDB/issues/44"); }; module.exports = Core; },{"./Db.js":10,"./Metrics.js":19,"./Overload":32,"./Shared":40}],10:[function(_dereq_,module,exports){ "use strict"; var Shared, Core, Collection, Metrics, Overload; Shared = _dereq_('./Shared'); Overload = _dereq_('./Overload'); /** * Creates a new ForerunnerDB database instance. * @constructor */ var Db = function (name, core) { this.init.apply(this, arguments); }; Db.prototype.init = function (name, core) { this.core(core); this._primaryKey = '_id'; this._name = name; this._collection = {}; this._debug = {}; }; Shared.addModule('Db', Db); Db.prototype.moduleLoaded = new Overload({ /** * Checks if a module has been loaded into the database. * @func moduleLoaded * @memberof Db * @param {String} moduleName The name of the module to check for. * @returns {Boolean} True if the module is loaded, false if not. */ 'string': function (moduleName) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { return false; } } return true; } return false; }, /** * Checks if a module is loaded and if so calls the passed * callback method. * @func moduleLoaded * @memberof Db * @param {String} moduleName The name of the module to check for. * @param {Function} callback The callback method to call if module is loaded. */ 'string, function': function (moduleName, callback) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { return false; } } if (callback) { callback(); } } }, /** * Checks if a module is loaded and if so calls the passed * success method, otherwise calls the failure method. * @func moduleLoaded * @memberof Db * @param {String} moduleName The name of the module to check for. * @param {Function} success The callback method to call if module is loaded. * @param {Function} failure The callback method to call if module not loaded. */ 'string, function, function': function (moduleName, success, failure) { if (moduleName !== undefined) { moduleName = moduleName.replace(/ /g, ''); var modules = moduleName.split(','), index; for (index = 0; index < modules.length; index++) { if (!Shared.modules[modules[index]]) { failure(); return false; } } success(); } } }); /** * Checks version against the string passed and if it matches (or partially matches) * then the callback is called. * @param {String} val The version to check against. * @param {Function} callback The callback to call if match is true. 
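 * @example Usage sketch (the version string "2.0" below is hypothetical).
 * // Get the full version string
 * var ver = db.version();
 *
 * // Run the callback only if the version starts with "2.0"
 * db.version('2.0', function () {
 *     console.log('Running a 2.0.x build');
 * });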
* @returns {Boolean} */ Db.prototype.version = function (val, callback) { if (val !== undefined) { if (Shared.version.indexOf(val) === 0) { if (callback) { callback(); } return true; } return false; } return Shared.version; }; // Expose moduleLoaded method to non-instantiated object ForerunnerDB Db.moduleLoaded = Db.prototype.moduleLoaded; // Expose version method to non-instantiated object ForerunnerDB Db.version = Db.prototype.version; // Provide public access to the Shared object Db.shared = Shared; Db.prototype.shared = Shared; Shared.addModule('Db', Db); Shared.mixin(Db.prototype, 'Mixin.Common'); Shared.mixin(Db.prototype, 'Mixin.ChainReactor'); Shared.mixin(Db.prototype, 'Mixin.Constants'); Shared.mixin(Db.prototype, 'Mixin.Tags'); Shared.mixin(Db.prototype, 'Mixin.Events'); Core = Shared.modules.Core; Collection = _dereq_('./Collection.js'); Metrics = _dereq_('./Metrics.js'); Db.prototype._isServer = false; /** * Gets / sets the core object this database belongs to. */ Shared.synthesize(Db.prototype, 'core'); /** * Gets / sets the default primary key for new collections. * @param {String=} val The name of the primary key to set. * @returns {*} */ Shared.synthesize(Db.prototype, 'primaryKey'); /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(Db.prototype, 'state'); /** * Gets / sets the name of the database. * @param {String=} val The name of the database to set. * @returns {*} */ Shared.synthesize(Db.prototype, 'name'); /** * Gets / sets mongodb emulation mode. * @param {Boolean=} val True to enable, false to disable. * @returns {*} */ Shared.synthesize(Db.prototype, 'mongoEmulation'); /** * Returns true if ForerunnerDB is running on a client browser. * @returns {boolean} */ Db.prototype.isClient = function () { return !this._isServer; }; /** * Returns true if ForerunnerDB is running on a server. * @returns {boolean} */ Db.prototype.isServer = function () { return this._isServer; }; /** * Checks if the database is running on a client (browser) or * a server (node.js). * @returns {Boolean} Returns true if running on a browser. */ Db.prototype.isClient = function () { return !this._isServer; }; /** * Checks if the database is running on a client (browser) or * a server (node.js). * @returns {Boolean} Returns true if running on a server. */ Db.prototype.isServer = function () { return this._isServer; }; /** * Converts a normal javascript array of objects into a DB collection. * @param {Array} arr An array of objects. * @returns {Collection} A new collection instance with the data set to the * array passed. */ Db.prototype.arrayToCollection = function (arr) { return new Collection().setData(arr); }; /** * Registers an event listener against an event name. * @param {String} event The name of the event to listen for. * @param {Function} listener The listener method to call when * the event is fired. * @returns {*} */ /*Db.prototype.on = function(event, listener) { this._listeners = this._listeners || {}; this._listeners[event] = this._listeners[event] || []; this._listeners[event].push(listener); return this; };*/ /** * De-registers an event listener from an event name. * @param {String} event The name of the event to stop listening for. * @param {Function} listener The listener method passed to on() when * registering the event listener. 
* @returns {*} */ /*Db.prototype.off = function(event, listener) { if (event in this._listeners) { var arr = this._listeners[event], index = arr.indexOf(listener); if (index > -1) { arr.splice(index, 1); } } return this; };*/ /** * Emits an event by name with the given data. * @param {String} event The name of the event to emit. * @param {*=} data The data to emit with the event. * @returns {*} */ /*Db.prototype.emit = function(event, data) { this._listeners = this._listeners || {}; if (event in this._listeners) { var arr = this._listeners[event], arrCount = arr.length, arrIndex; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { arr[arrIndex].apply(this, Array.prototype.slice.call(arguments, 1)); } } return this; };*/ Db.prototype.peek = function (search) { var i, coll, arr = [], typeOfSearch = typeof search; // Loop collections for (i in this._collection) { if (this._collection.hasOwnProperty(i)) { coll = this._collection[i]; if (typeOfSearch === 'string') { arr = arr.concat(coll.peek(search)); } else { arr = arr.concat(coll.find(search)); } } } return arr; }; /** * Find all documents across all collections in the database that match the passed * string or search object. * @param search String or search object. * @returns {Array} */ Db.prototype.peek = function (search) { var i, coll, arr = [], typeOfSearch = typeof search; // Loop collections for (i in this._collection) { if (this._collection.hasOwnProperty(i)) { coll = this._collection[i]; if (typeOfSearch === 'string') { arr = arr.concat(coll.peek(search)); } else { arr = arr.concat(coll.find(search)); } } } return arr; }; /** * Find all documents across all collections in the database that match the passed * string or search object and return them in an object where each key is the name * of the collection that the document was matched in. * @param search String or search object. * @returns {Object} */ Db.prototype.peekCat = function (search) { var i, coll, cat = {}, arr, typeOfSearch = typeof search; // Loop collections for (i in this._collection) { if (this._collection.hasOwnProperty(i)) { coll = this._collection[i]; if (typeOfSearch === 'string') { arr = coll.peek(search); if (arr && arr.length) { cat[coll.name()] = arr; } } else { arr = coll.find(search); if (arr && arr.length) { cat[coll.name()] = arr; } } } } return cat; }; Db.prototype.drop = new Overload({ /** * Drops the database. * @func drop * @memberof Db */ '': function () { if (!this.isDropped()) { var arr = this.collections(), arrCount = arr.length, arrIndex; this._state = 'dropped'; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { this.collection(arr[arrIndex].name).drop(); delete this._collection[arr[arrIndex].name]; } this.emit('drop', this); delete this._listeners; delete this._core._db[this._name]; } return true; }, /** * Drops the database with optional callback method. * @func drop * @memberof Db * @param {Function} callback Optional callback method. */ 'function': function (callback) { if (!this.isDropped()) { var arr = this.collections(), arrCount = arr.length, arrIndex, finishCount = 0, afterDrop = function () { finishCount++; if (finishCount === arrCount) { if (callback) { callback(); } } }; this._state = 'dropped'; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { this.collection(arr[arrIndex].name).drop(afterDrop); delete this._collection[arr[arrIndex].name]; } this.emit('drop', this); delete this._listeners; delete this._core._db[this._name]; } return true; }, /** * Drops the database with optional persistent storage drop. 
Persistent * storage is dropped by default if no preference is provided. * @func drop * @memberof Db * @param {Boolean} removePersist Drop persistent storage for this database. */ 'boolean': function (removePersist) { if (!this.isDropped()) { var arr = this.collections(), arrCount = arr.length, arrIndex; this._state = 'dropped'; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { this.collection(arr[arrIndex].name).drop(removePersist); delete this._collection[arr[arrIndex].name]; } this.emit('drop', this); delete this._listeners; delete this._core._db[this._name]; } return true; }, /** * Drops the database and optionally controls dropping persistent storage * and callback method. * @func drop * @memberof Db * @param {Boolean} removePersist Drop persistent storage for this database. * @param {Function} callback Optional callback method. */ 'boolean, function': function (removePersist, callback) { if (!this.isDropped()) { var arr = this.collections(), arrCount = arr.length, arrIndex, finishCount = 0, afterDrop = function () { finishCount++; if (finishCount === arrCount) { if (callback) { callback(); } } }; this._state = 'dropped'; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { this.collection(arr[arrIndex].name).drop(removePersist, afterDrop); delete this._collection[arr[arrIndex].name]; } this.emit('drop', this); delete this._listeners; delete this._core._db[this._name]; } return true; } }); /** * Gets a database instance by name. * @memberof Core * @param {String=} name Optional name of the database. If none is provided * a random name is assigned. * @returns {Db} */ Core.prototype.db = function (name) { // Handle being passed an instance if (name instanceof Db) { return name; } if (!name) { name = this.objectId(); } this._db[name] = this._db[name] || new Db(name, this); this._db[name].mongoEmulation(this.mongoEmulation()); return this._db[name]; }; /** * Returns an array of databases that ForerunnerDB currently has. * @memberof Core * @param {String|RegExp=} search The optional search string or regular expression to use * to match collection names against. * @returns {Array} An array of objects containing details of each database * that ForerunnerDB is currently managing and it's child entities. 
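 * @example Usage sketch (assumes `fdb` is an instantiated ForerunnerDB core
 * instance that is managing one or more databases).
 * // Details of every database
 * var all = fdb.databases();
 *
 * // Only databases whose names match the regular expression
 * var testDbs = fdb.databases(/^test/);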
*/ Core.prototype.databases = function (search) { var arr = [], tmpObj, addDb, i; if (search) { if (!(search instanceof RegExp)) { // Turn the search into a regular expression search = new RegExp(search); } } for (i in this._db) { if (this._db.hasOwnProperty(i)) { addDb = true; if (search) { if (!search.exec(i)) { addDb = false; } } if (addDb) { tmpObj = { name: i, children: [] }; if (this.shared.moduleExists('Collection')) { tmpObj.children.push({ module: 'collection', moduleName: 'Collections', count: this._db[i].collections().length }); } if (this.shared.moduleExists('CollectionGroup')) { tmpObj.children.push({ module: 'collectionGroup', moduleName: 'Collection Groups', count: this._db[i].collectionGroups().length }); } if (this.shared.moduleExists('Document')) { tmpObj.children.push({ module: 'document', moduleName: 'Documents', count: this._db[i].documents().length }); } if (this.shared.moduleExists('Grid')) { tmpObj.children.push({ module: 'grid', moduleName: 'Grids', count: this._db[i].grids().length }); } if (this.shared.moduleExists('Overview')) { tmpObj.children.push({ module: 'overview', moduleName: 'Overviews', count: this._db[i].overviews().length }); } if (this.shared.moduleExists('View')) { tmpObj.children.push({ module: 'view', moduleName: 'Views', count: this._db[i].views().length }); } arr.push(tmpObj); } } } arr.sort(function (a, b) { return a.name.localeCompare(b.name); }); return arr; }; Shared.finishModule('Db'); module.exports = Db; },{"./Collection.js":6,"./Metrics.js":19,"./Overload":32,"./Shared":40}],11:[function(_dereq_,module,exports){ "use strict"; // TODO: Remove the _update* methods because we are already mixing them // TODO: in now via Mixin.Updating and update autobind to extend the _update* // TODO: methods like we already do with collection var Shared, Collection, Overload, Db, Path; Shared = _dereq_('./Shared'); /** * Creates a new Document instance. Documents allow you to create individual * objects that can have standard ForerunnerDB CRUD operations run against * them, as well as data-binding if the AutoBind module is included in your * project. * @name Document * @class Document * @constructor */ var FdbDocument = function () { this.init.apply(this, arguments); }; FdbDocument.prototype.init = function (name) { this._name = name; this._data = {}; }; Shared.addModule('Document', FdbDocument); Shared.mixin(FdbDocument.prototype, 'Mixin.Common'); Shared.mixin(FdbDocument.prototype, 'Mixin.Events'); Shared.mixin(FdbDocument.prototype, 'Mixin.ChainReactor'); Shared.mixin(FdbDocument.prototype, 'Mixin.Constants'); Shared.mixin(FdbDocument.prototype, 'Mixin.Triggers'); Shared.mixin(FdbDocument.prototype, 'Mixin.Matching'); Shared.mixin(FdbDocument.prototype, 'Mixin.Updating'); Shared.mixin(FdbDocument.prototype, 'Mixin.Tags'); Overload = _dereq_('./Overload'); Collection = _dereq_('./Collection'); Db = Shared.modules.Db; Path = _dereq_('./Path'); /** * Gets / sets the current state. * @func state * @memberof Document * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(FdbDocument.prototype, 'state'); /** * Gets / sets the db instance this class instance belongs to. * @func db * @memberof Document * @param {Db=} db The db instance. * @returns {*} */ Shared.synthesize(FdbDocument.prototype, 'db'); /** * Gets / sets the document name. * @func name * @memberof Document * @param {String=} val The name to assign * @returns {*} */ Shared.synthesize(FdbDocument.prototype, 'name'); /** * Sets the data for the document. 
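 * @example Usage sketch (assumes `db` is an existing Db instance; the
 * document name and data are hypothetical).
 * var settings = db.document('appSettings');
 *
 * settings.setData({theme: 'dark', fontSize: 14});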
* @func setData * @memberof Document * @param data * @param options * @returns {Document} */ FdbDocument.prototype.setData = function (data, options) { var i, eventData, $unset; if (data) { options = options || { $decouple: true }; if (options && options.$decouple === true) { data = this.decouple(data); } if (this._linked) { $unset = {}; // Remove keys that don't exist in the new data from the current object for (i in this._data) { if (i.substr(0, 6) !== 'jQuery' && this._data.hasOwnProperty(i)) { // Check if existing data has key if (data[i] === undefined) { // Add property name to those to unset $unset[i] = 1; } } } data.$unset = $unset; // Now update the object with new data this.updateObject(this._data, data, {}); } else { // Straight data assignment this._data = data; } eventData = {type: 'setData', data: this.decouple(this._data)}; this.emit('immediateChange', eventData); this.deferEmit('change', eventData); } return this; }; /** * Gets the document's data returned as a single object. * @func find * @memberof Document * @param {Object} query The query object - currently unused, just * provide a blank object e.g. {} * @param {Object=} options An options object. * @returns {Object} The document's data object. */ FdbDocument.prototype.find = function (query, options) { var result; if (options && options.$decouple === false) { result = this._data; } else { result = this.decouple(this._data); } return result; }; /** * Finds sub-documents in this document. * @param {Object} match A query to check if this document should * be queried. If this document data doesn't match the query then * no results are returned. * @param {String} path The path string used to identify the * key in which sub-documents are stored in the parent document. * @param {Object=} subDocQuery The query to use when matching * which sub-documents to return. * @param {Object=} subDocOptions The options object to use * when querying for sub-documents. * @returns {*} */ FdbDocument.prototype.findSub = function (match, path, subDocQuery, subDocOptions) { return this._findSub([this.find(match)], path, subDocQuery, subDocOptions); }; FdbDocument.prototype._findSub = function (docArr, path, subDocQuery, subDocOptions) { var pathHandler = new Path(path), docCount = docArr.length, docIndex, subDocArr, subDocCollection = new Collection('__FDB_temp_' + this.objectId()).db(this._db), subDocResults, resultObj = { parents: docCount, subDocTotal: 0, subDocs: [], pathFound: false, err: '' }; subDocOptions = subDocOptions || {}; for (docIndex = 0; docIndex < docCount; docIndex++) { subDocArr = pathHandler.value(docArr[docIndex])[0]; if (subDocArr) { subDocCollection.setData(subDocArr); subDocResults = subDocCollection.find(subDocQuery, subDocOptions); if (subDocOptions.$returnFirst && subDocResults.length) { return subDocResults[0]; } if (subDocOptions.$split) { resultObj.subDocs.push(subDocResults); } else { resultObj.subDocs = resultObj.subDocs.concat(subDocResults); } resultObj.subDocTotal += subDocResults.length; resultObj.pathFound = true; } } // Drop the sub-document collection subDocCollection.drop(); if (!resultObj.pathFound) { resultObj.err = 'No objects found in the parent documents with a matching path of: ' + path; } // Check if the call should not return stats, if so return only subDocs array if (subDocOptions.$stats) { return resultObj; } else { return resultObj.subDocs[0]; } }; /** * Modifies the document. This will update the document with the data held in 'update'. 
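 * @example Usage sketch (assumes `doc` is an existing Document instance;
 * the keys below are hypothetical).
 * // Overwrite / add the "theme" key on the document's data
 * doc.update({}, {theme: 'light'});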
* @func update * @memberof Document * @param {Object} query The query that must be matched for a document to be * operated on. * @param {Object} update The object containing updated key/values. Any keys that * match keys on the existing document will be overwritten with this data. Any * keys that do not currently exist on the document will be added to the document. * @param {Object=} options An options object. * @returns {Array} The items that were updated. */ FdbDocument.prototype.update = function (query, update, options) { var result = this.updateObject(this._data, update, query, options), eventData; if (result) { eventData = {type: 'update', data: this.decouple(this._data)}; this.emit('immediateChange', eventData); this.deferEmit('change', eventData); } }; /** * Internal method for document updating. * @func updateObject * @memberof Document * @param {Object} doc The document to update. * @param {Object} update The object with key/value pairs to update the document with. * @param {Object} query The query object that we need to match to perform an update. * @param {Object} options An options object. * @param {String=} path The current recursive path. * @param {String=} opType The type of update operation to perform, if none is specified * default is to set new data against matching fields. * @returns {Boolean} True if the document was updated with new / changed data or * false if it was not updated because the data was the same. * @private */ FdbDocument.prototype.updateObject = Collection.prototype.updateObject; /** * Determines if the passed key has an array positional mark (a dollar at the end * of its name). * @func _isPositionalKey * @memberof Document * @param {String} key The key to check. * @returns {Boolean} True if it is a positional or false if not. * @private */ FdbDocument.prototype._isPositionalKey = function (key) { return key.substr(key.length - 2, 2) === '.$'; }; /** * Updates a property on an object depending on if the collection is * currently running data-binding or not. * @func _updateProperty * @memberof Document * @param {Object} doc The object whose property is to be updated. * @param {String} prop The property to update. * @param {*} val The new value of the property. * @private */ FdbDocument.prototype._updateProperty = function (doc, prop, val) { if (this._linked) { window.jQuery.observable(doc).setProperty(prop, val); if (this.debug()) { console.log(this.logIdentifier() + ' Setting data-bound document property "' + prop + '"'); } } else { doc[prop] = val; if (this.debug()) { console.log(this.logIdentifier() + ' Setting non-data-bound document property "' + prop + '" to val "' + val + '"'); } } }; /** * Increments a value for a property on a document by the passed number. * @func _updateIncrement * @memberof Document * @param {Object} doc The document to modify. * @param {String} prop The property to modify. * @param {Number} val The amount to increment by. * @private */ FdbDocument.prototype._updateIncrement = function (doc, prop, val) { if (this._linked) { window.jQuery.observable(doc).setProperty(prop, doc[prop] + val); } else { doc[prop] += val; } }; /** * Changes the index of an item in the passed array. * @func _updateSpliceMove * @memberof Document * @param {Array} arr The array to modify. * @param {Number} indexFrom The index to move the item from. * @param {Number} indexTo The index to move the item to. 
* @private */ FdbDocument.prototype._updateSpliceMove = function (arr, indexFrom, indexTo) { if (this._linked) { window.jQuery.observable(arr).move(indexFrom, indexTo); if (this.debug()) { console.log(this.logIdentifier() + ' Moving data-bound document array index from "' + indexFrom + '" to "' + indexTo + '"'); } } else { arr.splice(indexTo, 0, arr.splice(indexFrom, 1)[0]); if (this.debug()) { console.log(this.logIdentifier() + ' Moving non-data-bound document array index from "' + indexFrom + '" to "' + indexTo + '"'); } } }; /** * Inserts an item into the passed array at the specified index. * @func _updateSplicePush * @memberof Document * @param {Array} arr The array to insert into. * @param {Number} index The index to insert at. * @param {Object} doc The document to insert. * @private */ FdbDocument.prototype._updateSplicePush = function (arr, index, doc) { if (arr.length > index) { if (this._linked) { window.jQuery.observable(arr).insert(index, doc); } else { arr.splice(index, 0, doc); } } else { if (this._linked) { window.jQuery.observable(arr).insert(doc); } else { arr.push(doc); } } }; /** * Inserts an item at the end of an array. * @func _updatePush * @memberof Document * @param {Array} arr The array to insert the item into. * @param {Object} doc The document to insert. * @private */ FdbDocument.prototype._updatePush = function (arr, doc) { if (this._linked) { window.jQuery.observable(arr).insert(doc); } else { arr.push(doc); } }; /** * Removes an item from the passed array. * @func _updatePull * @memberof Document * @param {Array} arr The array to modify. * @param {Number} index The index of the item in the array to remove. * @private */ FdbDocument.prototype._updatePull = function (arr, index) { if (this._linked) { window.jQuery.observable(arr).remove(index); } else { arr.splice(index, 1); } }; /** * Multiplies a value for a property on a document by the passed number. * @func _updateMultiply * @memberof Document * @param {Object} doc The document to modify. * @param {String} prop The property to modify. * @param {Number} val The amount to multiply by. * @private */ FdbDocument.prototype._updateMultiply = function (doc, prop, val) { if (this._linked) { window.jQuery.observable(doc).setProperty(prop, doc[prop] * val); } else { doc[prop] *= val; } }; /** * Renames a property on a document to the passed property. * @func _updateRename * @memberof Document * @param {Object} doc The document to modify. * @param {String} prop The property to rename. * @param {Number} val The new property name. * @private */ FdbDocument.prototype._updateRename = function (doc, prop, val) { var existingVal = doc[prop]; if (this._linked) { window.jQuery.observable(doc).setProperty(val, existingVal); window.jQuery.observable(doc).removeProperty(prop); } else { doc[val] = existingVal; delete doc[prop]; } }; /** * Deletes a property on a document. * @func _updateUnset * @memberof Document * @param {Object} doc The document to modify. * @param {String} prop The property to delete. * @private */ FdbDocument.prototype._updateUnset = function (doc, prop) { if (this._linked) { window.jQuery.observable(doc).removeProperty(prop); } else { delete doc[prop]; } }; /** * Drops the document. * @func drop * @memberof Document * @returns {boolean} True if successful, false if not. 
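 * @example Usage sketch (assumes `doc` is an existing Document instance).
 * doc.drop(function (err, result) {
 *     // err is false and result is true when the drop succeeded
 * });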
*/ FdbDocument.prototype.drop = function (callback) { if (!this.isDropped()) { if (this._db && this._name) { if (this._db && this._db._document && this._db._document[this._name]) { this._state = 'dropped'; delete this._db._document[this._name]; delete this._data; this.emit('drop', this); if (callback) { callback(false, true); } delete this._listeners; return true; } } } else { return true; } return false; }; Db.prototype.document = new Overload('Db.prototype.document', { /** * Get a document with no name (generates a random name). If the * document does not already exist then one is created for that * name automatically. * @name document * @method Db.document * @func document * @memberof Db * @returns {Document} */ '': function () { return this.$main.call(this, { name: this.objectId() }); }, /** * Get a document by name. If the document does not already exist * then one is created for that name automatically. * @name document * @method Db.document * @func document * @memberof Db * @param {Object} data An options object or a document instance. * @returns {Document} */ 'object': function (data) { // Handle being passed an instance if (data instanceof FdbDocument) { if (data.state() !== 'dropped') { return data; } else { return this.$main.call(this, { name: data.name() }); } } return this.$main.call(this, data); }, /** * Get a document by name. If the document does not already exist * then one is created for that name automatically. * @name document * @method Db.document * @func document * @memberof Db * @param {String} documentName The name of the document. * @returns {Document} */ 'string': function (documentName) { return this.$main.call(this, { name: documentName }); }, /** * Get a document by name. If the document does not already exist * then one is created for that name automatically. * @name document * @method Db.document * @func document * @memberof Db * @param {String} documentName The name of the document. * @param {Object} options An options object. * @returns {Document} */ 'string, object': function (documentName, options) { options.name = documentName; return this.$main.call(this, options); }, '$main': function (options) { var self = this, name = options.name; if (!name) { if (!options || (options && options.throwError !== false)) { throw(this.logIdentifier() + ' Cannot get document with undefined name!'); } return; } if (this._document && this._document[name]) { return this._document[name]; } if (options && options.autoCreate === false) { if (options && options.throwError !== false) { throw(this.logIdentifier() + ' Cannot get document ' + name + ' because it does not exist and auto-create has been disabled!'); } return undefined; } if (this.debug()) { console.log(this.logIdentifier() + ' Creating document ' + name); } this._document = this._document || {}; this._document[name] = this._document[name] || new FdbDocument(name, options).db(this); // Listen for events on this document so we can fire global events // on the database in response to it self._document[name].on('change', function () { self.emit('change', self._document[name], 'document', name); }); self.deferEmit('create', self._document[name], 'document', name); return this._document[name]; } }); /** * Returns an array of documents the DB currently has. * @func documents * @memberof Db * @returns {Array} An array of objects containing details of each document * the database is currently managing.
*/ Db.prototype.documents = function () { var arr = [], item, i; for (i in this._document) { if (this._document.hasOwnProperty(i)) { item = this._document[i]; arr.push({ name: i, linked: item.isLinked !== undefined ? item.isLinked() : false }); } } return arr; }; Shared.finishModule('Document'); module.exports = FdbDocument; },{"./Collection":6,"./Overload":32,"./Path":34,"./Shared":40}],12:[function(_dereq_,module,exports){ // geohash.js // Geohash library for Javascript // (c) 2008 David Troy // Distributed under the MIT License // Original at: https://github.com/davetroy/geohash-js // Modified by Irrelon Software Limited (http://www.irrelon.com) // to clean up and modularise the code using Node.js-style exports // and add a few helper methods. // @by Rob Evans - rob@irrelon.com "use strict"; /* Define some shared constants that will be used by all instances of the module. */ var bits, base32, neighbors, borders, PI180 = Math.PI / 180, PI180R = 180 / Math.PI, earthRadius = 6371; // mean radius of the earth bits = [16, 8, 4, 2, 1]; base32 = "0123456789bcdefghjkmnpqrstuvwxyz"; neighbors = { right: {even: "bc01fg45238967deuvhjyznpkmstqrwx"}, left: {even: "238967debc01fg45kmstqrwxuvhjyznp"}, top: {even: "p0r21436x8zb9dcf5h7kjnmqesgutwvy"}, bottom: {even: "14365h7k9dcfesgujnmqp0r2twvyx8zb"} }; borders = { right: {even: "bcfguvyz"}, left: {even: "0145hjnp"}, top: {even: "prxz"}, bottom: {even: "028b"} }; neighbors.bottom.odd = neighbors.left.even; neighbors.top.odd = neighbors.right.even; neighbors.left.odd = neighbors.bottom.even; neighbors.right.odd = neighbors.top.even; borders.bottom.odd = borders.left.even; borders.top.odd = borders.right.even; borders.left.odd = borders.bottom.even; borders.right.odd = borders.top.even; var GeoHash = function () {}; /** * Converts degrees to radians. * @param {Number} degrees * @return {Number} radians */ GeoHash.prototype.radians = function radians (degrees) { return degrees * PI180; }; /** * Converts radians to degrees. * @param {Number} radians * @return {Number} degrees */ GeoHash.prototype.degrees = function (radians) { return radians * PI180R; }; GeoHash.prototype.refineInterval = function (interval, cd, mask) { if (cd & mask) { //jshint ignore: line interval[0] = (interval[0] + interval[1]) / 2; } else { interval[1] = (interval[0] + interval[1]) / 2; } }; /** * Calculates all surrounding neighbours of a hash and returns them. * @param {String} centerHash The hash at the center of the grid. 
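 * @example Usage sketch (the geohash value below is arbitrary).
 * var gh = new GeoHash(),
 *     grid;
 *
 * // Object with center, left, right, top, bottom and corner hashes
 * grid = gh.calculateNeighbours('gcpvj0');
 *
 * // The same neighbours returned as a 9-element array instead
 * grid = gh.calculateNeighbours('gcpvj0', {type: 'array'});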
* @param options * @returns {*} */ GeoHash.prototype.calculateNeighbours = function (centerHash, options) { var response; if (!options || options.type === 'object') { response = { center: centerHash, left: this.calculateAdjacent(centerHash, 'left'), right: this.calculateAdjacent(centerHash, 'right'), top: this.calculateAdjacent(centerHash, 'top'), bottom: this.calculateAdjacent(centerHash, 'bottom') }; response.topLeft = this.calculateAdjacent(response.left, 'top'); response.topRight = this.calculateAdjacent(response.right, 'top'); response.bottomLeft = this.calculateAdjacent(response.left, 'bottom'); response.bottomRight = this.calculateAdjacent(response.right, 'bottom'); } else { response = []; response[4] = centerHash; response[3] = this.calculateAdjacent(centerHash, 'left'); response[5] = this.calculateAdjacent(centerHash, 'right'); response[1] = this.calculateAdjacent(centerHash, 'top'); response[7] = this.calculateAdjacent(centerHash, 'bottom'); response[0] = this.calculateAdjacent(response[3], 'top'); response[2] = this.calculateAdjacent(response[5], 'top'); response[6] = this.calculateAdjacent(response[3], 'bottom'); response[8] = this.calculateAdjacent(response[5], 'bottom'); } return response; }; /** * Calculates a new lat/lng by travelling from the center point in the * bearing specified for the distance specified. * @param {Array} centerPoint An array with latitude at index 0 and * longitude at index 1. * @param {Number} distanceKm The distance to travel in kilometers. * @param {Number} bearing The bearing to travel in degrees (zero is * north). * @returns {{lat: Number, lng: Number}} */ GeoHash.prototype.calculateLatLngByDistanceBearing = function (centerPoint, distanceKm, bearing) { var curLon = centerPoint[1], curLat = centerPoint[0], destLat = Math.asin(Math.sin(this.radians(curLat)) * Math.cos(distanceKm / earthRadius) + Math.cos(this.radians(curLat)) * Math.sin(distanceKm / earthRadius) * Math.cos(this.radians(bearing))), tmpLon = this.radians(curLon) + Math.atan2(Math.sin(this.radians(bearing)) * Math.sin(distanceKm / earthRadius) * Math.cos(this.radians(curLat)), Math.cos(distanceKm / earthRadius) - Math.sin(this.radians(curLat)) * Math.sin(destLat)), destLon = (tmpLon + 3 * Math.PI) % (2 * Math.PI) - Math.PI; // normalise to -180..+180º return { lat: this.degrees(destLat), lng: this.degrees(destLon) }; }; /** * Calculates the extents of a bounding box around the center point which * encompasses the radius in kilometers passed. * @param {Array} centerPoint An array with latitude at index 0 and * longitude at index 1. * @param radiusKm Radius in kilometers. * @returns {{lat: Array, lng: Array}} */ GeoHash.prototype.calculateExtentByRadius = function (centerPoint, radiusKm) { var maxWest, maxEast, maxNorth, maxSouth, lat = [], lng = []; maxNorth = this.calculateLatLngByDistanceBearing(centerPoint, radiusKm, 0); maxEast = this.calculateLatLngByDistanceBearing(centerPoint, radiusKm, 90); maxSouth = this.calculateLatLngByDistanceBearing(centerPoint, radiusKm, 180); maxWest = this.calculateLatLngByDistanceBearing(centerPoint, radiusKm, 270); lat[0] = maxNorth.lat; lat[1] = maxSouth.lat; lng[0] = maxWest.lng; lng[1] = maxEast.lng; return { lat: lat, lng: lng }; }; /** * Calculates all the geohashes that make up the bounding box that surrounds * the circle created from the center point and radius passed. * @param {Array} centerPoint An array with latitude at index 0 and * longitude at index 1. * @param {Number} radiusKm The radius in kilometers to encompass. 
* @param {Number} precision The number of characters to limit the returned * geohash strings to. * @returns {Array} The array of geohashes that encompass the bounding box. */ GeoHash.prototype.calculateHashArrayByRadius = function (centerPoint, radiusKm, precision) { var extent = this.calculateExtentByRadius(centerPoint, radiusKm), northWest = [extent.lat[0], extent.lng[0]], northEast = [extent.lat[0], extent.lng[1]], southWest = [extent.lat[1], extent.lng[0]], northWestHash = this.encode(northWest[0], northWest[1], precision), northEastHash = this.encode(northEast[0], northEast[1], precision), southWestHash = this.encode(southWest[0], southWest[1], precision), hash, widthCount = 0, heightCount = 0, widthIndex, heightIndex, hashArray = []; hash = northWestHash; hashArray.push(hash); // Walk from north west to north east until we find the north east geohash while (hash !== northEastHash) { hash = this.calculateAdjacent(hash, 'right'); widthCount++; hashArray.push(hash); } hash = northWestHash; // Walk from north west to south west until we find the south west geohash while (hash !== southWestHash) { hash = this.calculateAdjacent(hash, 'bottom'); heightCount++; } // We now know the width and height in hash boxes of the area, fill in the // rest of the hashes into the hashArray array for (widthIndex = 0; widthIndex <= widthCount; widthIndex++) { hash = hashArray[widthIndex]; for (heightIndex = 0; heightIndex < heightCount; heightIndex++) { hash = this.calculateAdjacent(hash, 'bottom'); hashArray.push(hash); } } return hashArray; }; /** * Calculates an adjacent hash to the hash passed, in the direction * specified. * @param {String} srcHash The hash to calculate adjacent to. * @param {String} dir Either "top", "left", "bottom" or "right". * @returns {String} The resulting geohash. */ GeoHash.prototype.calculateAdjacent = function (srcHash, dir) { srcHash = srcHash.toLowerCase(); var lastChr = srcHash.charAt(srcHash.length - 1), type = (srcHash.length % 2) ? 'odd' : 'even', base = srcHash.substring(0, srcHash.length - 1); if (borders[dir][type].indexOf(lastChr) !== -1) { base = this.calculateAdjacent(base, dir); } return base + base32[neighbors[dir][type].indexOf(lastChr)]; }; /** * Decodes a string geohash back to a longitude/latitude array. * The array contains three latitudes and three longitudes. The * first of each is the lower extent of the geohash bounding box, * the second is the upper extent and the third is the center * of the geohash bounding box. * @param {String} geohash The hash to decode. * @returns {Object} */ GeoHash.prototype.decode = function (geohash) { var isEven = 1, lat = [], lon = [], i, c, cd, j, mask, latErr, lonErr; lat[0] = -90.0; lat[1] = 90.0; lon[0] = -180.0; lon[1] = 180.0; latErr = 90.0; lonErr = 180.0; for (i = 0; i < geohash.length; i++) { c = geohash[i]; cd = base32.indexOf(c); for (j = 0; j < 5; j++) { mask = bits[j]; if (isEven) { lonErr /= 2; this.refineInterval(lon, cd, mask); } else { latErr /= 2; this.refineInterval(lat, cd, mask); } isEven = !isEven; } } lat[2] = (lat[0] + lat[1]) / 2; lon[2] = (lon[0] + lon[1]) / 2; return { lat: lat, lng: lon }; }; /** * Encodes a longitude/latitude to geohash string. * @param latitude * @param longitude * @param {Number=} precision Length of the geohash string. Defaults to 12. 
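 *
 * A minimal sketch of encoding and decoding (coordinates are illustrative;
 * precision 5 yields a cell a few kilometres across):
 * @example
 * var hasher = new GeoHash();
 * var hash = hasher.encode(51.5074, -0.1278, 5); // latitude, longitude, precision
 * var box = hasher.decode(hash);
 * // box.lat and box.lng each hold [lowerExtent, upperExtent, center]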
* @returns {String} */ GeoHash.prototype.encode = function (latitude, longitude, precision) { var isEven = 1, mid, lat = [], lon = [], bit = 0, ch = 0, geoHash = ""; if (!precision) { precision = 12; } lat[0] = -90.0; lat[1] = 90.0; lon[0] = -180.0; lon[1] = 180.0; while (geoHash.length < precision) { if (isEven) { mid = (lon[0] + lon[1]) / 2; if (longitude > mid) { ch |= bits[bit]; //jshint ignore: line lon[0] = mid; } else { lon[1] = mid; } } else { mid = (lat[0] + lat[1]) / 2; if (latitude > mid) { ch |= bits[bit]; //jshint ignore: line lat[0] = mid; } else { lat[1] = mid; } } isEven = !isEven; if (bit < 4) { bit++; } else { geoHash += base32[ch]; bit = 0; ch = 0; } } return geoHash; }; if (typeof module !== 'undefined') { module.exports = GeoHash; } },{}],13:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared, Db, Collection, CollectionGroup, View, CollectionInit, DbInit, ReactorIO; //Shared = ForerunnerDB.shared; Shared = _dereq_('./Shared'); /** * Creates a new grid instance. * @name Grid * @class Grid * @param {String} selector jQuery selector. * @param {String} template The template selector. * @param {Object=} options The options object to apply to the grid. * @constructor */ var Grid = function (selector, template, options) { this.init.apply(this, arguments); }; Grid.prototype.init = function (selector, template, options) { var self = this; this._baseQuery = {}; this._selector = selector; this._template = template; this._options = options || {}; this._debug = {}; this._id = this.objectId(); this._collectionDroppedWrap = function () { self._collectionDropped.apply(self, arguments); }; }; Shared.addModule('Grid', Grid); Shared.mixin(Grid.prototype, 'Mixin.Common'); Shared.mixin(Grid.prototype, 'Mixin.ChainReactor'); Shared.mixin(Grid.prototype, 'Mixin.Constants'); Shared.mixin(Grid.prototype, 'Mixin.Triggers'); Shared.mixin(Grid.prototype, 'Mixin.Events'); Shared.mixin(Grid.prototype, 'Mixin.Tags'); Collection = _dereq_('./Collection'); CollectionGroup = _dereq_('./CollectionGroup'); View = _dereq_('./View'); ReactorIO = _dereq_('./ReactorIO'); CollectionInit = Collection.prototype.init; Db = Shared.modules.Db; DbInit = Db.prototype.init; /** * Gets / sets the current state. * @func state * @memberof Grid * @param {String=} val The name of the state to set. * @returns {Grid} */ Shared.synthesize(Grid.prototype, 'state'); /** * Gets / sets the current name. * @func name * @memberof Grid * @param {String=} val The name to set. * @returns {Grid} */ Shared.synthesize(Grid.prototype, 'name'); /** * Executes an insert against the grid's underlying data-source. * @func insert * @memberof Grid */ Grid.prototype.insert = function () { this._from.insert.apply(this._from, arguments); }; /** * Executes an update against the grid's underlying data-source. * @func update * @memberof Grid */ Grid.prototype.update = function () { this._from.update.apply(this._from, arguments); }; /** * Executes an updateById against the grid's underlying data-source. * @func updateById * @memberof Grid */ Grid.prototype.updateById = function () { this._from.updateById.apply(this._from, arguments); }; /** * Executes a remove against the grid's underlying data-source. * @func remove * @memberof Grid */ Grid.prototype.remove = function () { this._from.remove.apply(this._from, arguments); }; /** * Sets the data source from which the grid will assemble its data. * @func from * @memberof Grid * @param {View} dataSource The data source to use to assemble grid data. 
* @returns {Grid} */ Grid.prototype.from = function (dataSource) { //var self = this; if (dataSource !== undefined) { // Check if we have an existing from if (this._from) { // Remove the listener to the drop event this._from.off('drop', this._collectionDroppedWrap); this._from._removeGrid(this); } if (typeof(dataSource) === 'string') { dataSource = this._db.collection(dataSource); } this._from = dataSource; this._from.on('drop', this._collectionDroppedWrap); // If the data source has a query method, assign the current // query to our base query so we use it in all further updates if (typeof this._from.query === 'function') { this._baseQuery = this._from.query(); } this.refresh(); } return this; }; /** * Gets / sets the db instance this class instance belongs to. * @func db * @memberof Grid * @param {Db=} db The db instance. * @returns {*} */ Shared.synthesize(Grid.prototype, 'db', function (db) { if (db) { // Apply the same debug settings this.debug(db.debug()); } return this.$super.apply(this, arguments); }); Grid.prototype._collectionDropped = function (collection) { if (collection) { // Collection was dropped, remove from grid delete this._from; } }; /** * Drops a grid and all it's stored data from the database. * @func drop * @memberof Grid * @returns {boolean} True on success, false on failure. */ Grid.prototype.drop = function (callback) { if (!this.isDropped()) { if (this._from) { // Remove data-binding this._from.unlink(this._selector, this.template()); // Kill listeners and references this._from.off('drop', this._collectionDroppedWrap); this._from._removeGrid(this); if (this.debug() || (this._db && this._db.debug())) { console.log(this.logIdentifier() + ' Dropping grid ' + this._selector); } this._state = 'dropped'; if (this._db && this._selector) { delete this._db._grid[this._selector]; } this.emit('drop', this); if (callback) { callback(false, true); } delete this._selector; delete this._template; delete this._from; delete this._db; delete this._listeners; return true; } } else { return true; } return false; }; /** * Gets / sets the grid's HTML template to use when rendering. * @func template * @memberof Grid * @param {String} template The template's jQuery selector. * @returns {*} */ Grid.prototype.template = function (template) { if (template !== undefined) { this._template = template; return this; } return this._template; }; Grid.prototype._sortGridClick = function (e) { var elem = window.jQuery(e.currentTarget), sortColText = elem.attr('data-grid-sort') || '', sortColDir = parseInt((elem.attr('data-grid-dir') || "-1"), 10) === -1 ? 1 : -1, sortCols = sortColText.split(','), sortObj = {}, i; // Remove all grid sort tags from the grid window.jQuery(this._selector).find('[data-grid-dir]').removeAttr('data-grid-dir'); // Flip the sort direction elem.attr('data-grid-dir', sortColDir); for (i = 0; i < sortCols.length; i++) { sortObj[sortCols] = sortColDir; } Shared.mixin(sortObj, this._options.$orderBy); this.emit('beforeChange', 'sort'); this.emit('beforeSort', sortObj); this._from.orderBy(sortObj); this.emit('sort', sortObj); }; /** * Sets the base query and query options on the grid that * will remain part of the query regardless of other changes. * @param {Object} queryObj The query object. * @param {Object} queryOptions The query options object. * @returns {Grid} */ Grid.prototype.query = function (queryObj, queryOptions) { this._baseQuery = queryObj; this._baseQueryOptions = queryOptions; return this; }; /** * Refreshes the grid data such as ordering etc. 
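 * Called automatically by from() when a data source is assigned; a manual call
 * is typically only needed after changing grid options or templates. Note that
 * refresh() throws if the AutoBind module (link support) is not loaded.
 * @example
 * // A minimal sketch, assuming '#grid' and '#gridRow' are illustrative
 * // selectors that exist in the DOM:
 * var grid = db.collection('items').grid('#grid', '#gridRow');
 * grid.template('#compactRow'); // '#compactRow' is an illustrative template selector
 * grid.refresh();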
* @func refresh * @memberof Grid */ Grid.prototype.refresh = function () { if (this._from) { if (this._from.link) { var self = this, elem = window.jQuery(this._selector), sortClickListener = function () { self._sortGridClick.apply(self, arguments); }; // Clear the container elem.html(''); if (self._from.orderBy) { // Remove listeners elem.off('click', '[data-grid-sort]', sortClickListener); } // Set wrap name if none is provided self._options.$wrap = self._options.$wrap || 'gridRow'; // Auto-bind the data to the grid template self._from.link(self._selector, self.template(), self._options); // Check if the data source (collection or view) has an // orderBy method (usually only views) and if so activate // the sorting system if (self._from.orderBy) { // Listen for sort requests elem.on('click', '[data-grid-sort]', sortClickListener); } if (self._from.query) { // Listen for filter requests var queryObj = self.decouple(self._baseQuery); elem.find('[data-grid-filter]').each(function (index, filterElem) { filterElem = window.jQuery(filterElem); var filterField = filterElem.attr('data-grid-filter'), filterVarType = filterElem.attr('data-grid-vartype'), filterSort = {}, title = filterElem.html(), dropDownButton, dropDownMenu, template, onFilterSourceChange, filterQuery, filterView = self._db.view('tmpGridFilter_' + self._id + '_' + filterField); filterSort[filterField] = 1; filterQuery = { $distinct: filterSort }; filterView .query(filterQuery) .orderBy(filterSort) .from(self._from._from); onFilterSourceChange = function () { filterView.refresh(); }; // Listen for changes on the filter source and update // the filters if changes occur self._from._from.on('change', onFilterSourceChange); // This bit of commented code can filter other filter // views so that only items that match other filter // selections show in the other filters, but this can // be confusing to the user. The best possible way to // use this would be to "grey out" the selections that // cannot be used because they would return zero results // when used in conjunction with other filter selections. /*self._from.on('change', function () { if (self._from && self._from instanceof View) { var query = self._from.query(); query.$distinct = filterSort; delete query[filterField]; filterView .query(query); console.log(self._from._from.find(query)); } });*/ // Listen for the grid being dropped and remove listener // for changes on filter source self.on('drop', function () { if (self._from && self._from._from) { self._from._from.off('change', onFilterSourceChange); } filterView.drop(); }); template = [ '' ]; dropDownButton = window.jQuery(template.join('')); dropDownMenu = window.jQuery(''); dropDownButton.append(dropDownMenu); filterElem.html(dropDownButton); // Data-link the underlying data to the grid filter drop-down filterView.link(dropDownMenu, { template: [ '', '', '
  • ', '', ' All', '', '
  • ', '', '{^{for options}}', '
  • ', '', ' {^{:' + filterField + '}}', '', '
  • ', '{{/for}}' ].join('') }, { $wrap: 'options' }); elem.on('keyup', '#' + self._id + '_' + filterField + '_dropdownMenu .gridFilterSearch', function (e) { var elem = window.jQuery(this), query = filterView.query(), search = elem.val(); if (search) { query[filterField] = new RegExp(search, 'gi'); } else { delete query[filterField]; } filterView.query(query); }); elem.on('click', '#' + self._id + '_' + filterField + '_dropdownMenu .gridFilterClearSearch', function (e) { // Clear search text box window.jQuery(this).parents('li').find('.gridFilterSearch').val(''); // Clear view query var query = filterView.query(); delete query[filterField]; filterView.query(query); }); elem.on('click', '#' + self._id + '_' + filterField + '_dropdownMenu li', function (e) { e.stopPropagation(); var fieldValue, elem = $(this), checkbox = elem.find('input[type="checkbox"]'), checked, addMode = true, fieldInArr, liElem, i; // If the checkbox is not the one clicked on if (!window.jQuery(e.target).is('input')) { // Set checkbox to opposite of current value checkbox.prop('checked', !checkbox.prop('checked')); checked = checkbox.is(':checked'); } else { checkbox.prop('checked', checkbox.prop('checked')); checked = checkbox.is(':checked'); } liElem = window.jQuery(this); fieldValue = liElem.attr('data-val'); // Check if the selection is the "all" option if (fieldValue === '$all') { // Remove the field from the query delete queryObj[filterField]; // Clear all other checkboxes liElem.parent().find('li[data-val!="$all"]').find('input[type="checkbox"]').prop('checked', false); } else { // Clear the "all" checkbox liElem.parent().find('[data-val="$all"]').find('input[type="checkbox"]').prop('checked', false); // Check if the type needs casting switch (filterVarType) { case 'integer': fieldValue = parseInt(fieldValue, 10); break; case 'float': fieldValue = parseFloat(fieldValue); break; default: } // Check if the item exists already queryObj[filterField] = queryObj[filterField] || { $in: [] }; fieldInArr = queryObj[filterField].$in; // If no $in array exists for this field, generate one now if (!fieldInArr) { fieldInArr = queryObj[filterField].$in = []; } for (i = 0; i < fieldInArr.length; i++) { if (fieldInArr[i] === fieldValue) { // Item already exists if (checked === false) { // Remove the item fieldInArr.splice(i, 1); } addMode = false; break; } } if (addMode && checked) { fieldInArr.push(fieldValue); } if (!fieldInArr.length) { // Remove the field from the query delete queryObj[filterField]; } } self.emit('beforeChange', 'filter'); self.emit('beforeFilter', queryObj); // Set the view query self._from.queryData(queryObj); if (self._from.pageFirst) { self._from.pageFirst(); } }); }); } self.emit('refresh'); } else { throw('Grid requires the AutoBind module in order to operate!'); } } return this; }; /** * Returns the number of documents currently in the grid. * @func count * @memberof Grid * @returns {Number} */ Grid.prototype.count = function () { return this._from.count(); }; /** * Creates a grid and assigns the collection as its data source. * @func grid * @memberof Collection * @param {String} selector jQuery selector of grid output target. * @param {String} template The table template to use when rendering the grid. * @param {Object=} options The options object to apply to the grid. 
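 *
 * A minimal sketch, assuming the AutoBind module and jQuery are loaded and
 * that '#grid' / '#gridTableTemplate' are illustrative selectors present in
 * the page:
 * @example
 * var grid = db.collection('items').grid('#grid', '#gridTableTemplate', {
 *     $wrap: 'gridRow' // optional; defaults to 'gridRow' when omitted
 * });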
* @returns {*} */ Collection.prototype.grid = View.prototype.grid = function (selector, template, options) { if (this._db && this._db._grid ) { if (selector !== undefined) { if (template !== undefined) { if (!this._db._grid[selector]) { var grid = new Grid(selector, template, options) .db(this._db) .from(this); this._grid = this._grid || []; this._grid.push(grid); this._db._grid[selector] = grid; return grid; } else { throw(this.logIdentifier() + ' Cannot create a grid because a grid with this name already exists: ' + selector); } } return this._db._grid[selector]; } return this._db._grid; } }; /** * Removes a grid safely from the DOM. Must be called when grid is * no longer required / is being removed from DOM otherwise references * will stick around and cause memory leaks. * @func unGrid * @memberof Collection * @param {String} selector jQuery selector of grid output target. * @param {String} template The table template to use when rendering the grid. * @param {Object=} options The options object to apply to the grid. * @returns {*} */ Collection.prototype.unGrid = View.prototype.unGrid = function (selector, template, options) { var i, grid; if (this._db && this._db._grid ) { if (selector && template) { if (this._db._grid[selector]) { grid = this._db._grid[selector]; delete this._db._grid[selector]; return grid.drop(); } else { throw(this.logIdentifier() + ' Cannot remove grid because a grid with this name does not exist: ' + name); } } else { // No parameters passed, remove all grids from this module for (i in this._db._grid) { if (this._db._grid.hasOwnProperty(i)) { grid = this._db._grid[i]; delete this._db._grid[i]; grid.drop(); if (this.debug()) { console.log(this.logIdentifier() + ' Removed grid binding "' + i + '"'); } } } this._db._grid = {}; } } }; /** * Adds a grid to the internal grid lookup. * @func _addGrid * @memberof Collection * @param {Grid} grid The grid to add. * @returns {Collection} * @private */ Collection.prototype._addGrid = CollectionGroup.prototype._addGrid = View.prototype._addGrid = function (grid) { if (grid !== undefined) { this._grid = this._grid || []; this._grid.push(grid); } return this; }; /** * Removes a grid from the internal grid lookup. * @func _removeGrid * @memberof Collection * @param {Grid} grid The grid to remove. * @returns {Collection} * @private */ Collection.prototype._removeGrid = CollectionGroup.prototype._removeGrid = View.prototype._removeGrid = function (grid) { if (grid !== undefined && this._grid) { var index = this._grid.indexOf(grid); if (index > -1) { this._grid.splice(index, 1); } } return this; }; // Extend DB with grids init Db.prototype.init = function () { this._grid = {}; DbInit.apply(this, arguments); }; /** * Determine if a grid with the passed name already exists. * @func gridExists * @memberof Db * @param {String} selector The jQuery selector to bind the grid to. * @returns {boolean} */ Db.prototype.gridExists = function (selector) { return Boolean(this._grid[selector]); }; /** * Creates a grid based on the passed arguments. * @func grid * @memberof Db * @param {String} selector The jQuery selector of the grid to retrieve. * @param {String} template The table template to use when rendering the grid. * @param {Object=} options The options object to apply to the grid. 
* @returns {*} */ Db.prototype.grid = function (selector, template, options) { if (!this._grid[selector]) { if (this.debug() || (this._db && this._db.debug())) { console.log(this.logIdentifier() + ' Creating grid ' + selector); } } this._grid[selector] = this._grid[selector] || new Grid(selector, template, options).db(this); return this._grid[selector]; }; /** * Removes a grid based on the passed arguments. * @func unGrid * @memberof Db * @param {String} selector The jQuery selector of the grid to retrieve. * @param {String} template The table template to use when rendering the grid. * @param {Object=} options The options object to apply to the grid. * @returns {*} */ Db.prototype.unGrid = function (selector, template, options) { if (!this._grid[selector]) { if (this.debug() || (this._db && this._db.debug())) { console.log(this.logIdentifier() + ' Creating grid ' + selector); } } this._grid[selector] = this._grid[selector] || new Grid(selector, template, options).db(this); return this._grid[selector]; }; /** * Returns an array of grids the DB currently has. * @func grids * @memberof Db * @returns {Array} An array of objects containing details of each grid * the database is currently managing. */ Db.prototype.grids = function () { var arr = [], item, i; for (i in this._grid) { if (this._grid.hasOwnProperty(i)) { item = this._grid[i]; arr.push({ name: i, count: item.count(), linked: item.isLinked !== undefined ? item.isLinked() : false }); } } return arr; }; Shared.finishModule('Grid'); module.exports = Grid; },{"./Collection":6,"./CollectionGroup":7,"./ReactorIO":38,"./Shared":40,"./View":42}],14:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared, Collection, CollectionInit, Overload; Shared = _dereq_('./Shared'); Overload = _dereq_('./Overload'); /** * The constructor. 
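 * Wraps a Highcharts chart around a collection and keeps it in sync by
 * listening to the collection's 'change' and 'drop' events. Requires the
 * Highcharts library and jQuery to be available on the window object.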
* * @constructor */ var Highchart = function (collection, options) { this.init.apply(this, arguments); }; Highchart.prototype.init = function (collection, options) { this._options = options; this._selector = window.jQuery(this._options.selector); if (!this._selector[0]) { throw(this.classIdentifier() + ' "' + collection.name() + '": Chart target element does not exist via selector: ' + this._options.selector); } this._listeners = {}; this._collection = collection; // Setup the chart this._options.series = []; // Disable attribution on highcharts options.chartOptions = options.chartOptions || {}; options.chartOptions.credits = false; // Set the data for the chart var data, seriesObj, chartData; switch (this._options.type) { case 'pie': // Create chart from data this._selector.highcharts(this._options.chartOptions); this._chart = this._selector.highcharts(); // Generate graph data from collection data data = this._collection.find(); seriesObj = { allowPointSelect: true, cursor: 'pointer', dataLabels: { enabled: true, format: '{point.name}: {y} ({point.percentage:.0f}%)', style: { color: (window.Highcharts.theme && window.Highcharts.theme.contrastTextColor) || 'black' } } }; chartData = this.pieDataFromCollectionData(data, this._options.keyField, this._options.valField); window.jQuery.extend(seriesObj, this._options.seriesOptions); window.jQuery.extend(seriesObj, { name: this._options.seriesName, data: chartData }); this._chart.addSeries(seriesObj, true, true); break; case 'line': case 'area': case 'column': case 'bar': // Generate graph data from collection data chartData = this.seriesDataFromCollectionData( this._options.seriesField, this._options.keyField, this._options.valField, this._options.orderBy, this._options ); this._options.chartOptions.xAxis = chartData.xAxis; this._options.chartOptions.series = chartData.series; this._selector.highcharts(this._options.chartOptions); this._chart = this._selector.highcharts(); break; default: throw(this.classIdentifier() + ' "' + collection.name() + '": Chart type specified is not currently supported by ForerunnerDB: ' + this._options.type); } // Hook the collection events to auto-update the chart this._hookEvents(); }; Shared.addModule('Highchart', Highchart); Collection = Shared.modules.Collection; CollectionInit = Collection.prototype.init; Shared.mixin(Highchart.prototype, 'Mixin.Common'); Shared.mixin(Highchart.prototype, 'Mixin.Events'); /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(Highchart.prototype, 'state'); /** * Generate pie-chart series data from the given collection data array. * @param data * @param keyField * @param valField * @returns {Array} */ Highchart.prototype.pieDataFromCollectionData = function (data, keyField, valField) { var graphData = [], i; for (i = 0; i < data.length; i++) { graphData.push([data[i][keyField], data[i][valField]]); } return graphData; }; /** * Generate line-chart series data from the given collection data array. * @param seriesField * @param keyField * @param valField * @param orderBy */ Highchart.prototype.seriesDataFromCollectionData = function (seriesField, keyField, valField, orderBy, options) { var data = this._collection.distinct(seriesField), seriesData = [], xAxis = options && options.chartOptions && options.chartOptions.xAxis ? 
options.chartOptions.xAxis : { categories: [] }, seriesName, query, dataSearch, seriesValues, sData, i, k; // What we WANT to output: /*series: [{ name: 'Responses', data: [7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6] }]*/ // Loop keys for (i = 0; i < data.length; i++) { seriesName = data[i]; query = {}; query[seriesField] = seriesName; seriesValues = []; dataSearch = this._collection.find(query, { orderBy: orderBy }); // Loop the keySearch data and grab the value for each item for (k = 0; k < dataSearch.length; k++) { if (xAxis.categories) { xAxis.categories.push(dataSearch[k][keyField]); seriesValues.push(dataSearch[k][valField]); } else { seriesValues.push([dataSearch[k][keyField], dataSearch[k][valField]]); } } sData = { name: seriesName, data: seriesValues }; if (options.seriesOptions) { for (k in options.seriesOptions) { if (options.seriesOptions.hasOwnProperty(k)) { sData[k] = options.seriesOptions[k]; } } } seriesData.push(sData); } return { xAxis: xAxis, series: seriesData }; }; /** * Hook the events the chart needs to know about from the internal collection. * @private */ Highchart.prototype._hookEvents = function () { var self = this; self._collection.on('change', function () { self._changeListener.apply(self, arguments); }); // If the collection is dropped, clean up after ourselves self._collection.on('drop', function () { self.drop.apply(self); }); }; /** * Handles changes to the collection data that the chart is reading from and then * updates the data in the chart display. * @private */ Highchart.prototype._changeListener = function () { var self = this; // Update the series data on the chart if (typeof self._collection !== 'undefined' && self._chart) { var data = self._collection.find(), i; switch (self._options.type) { case 'pie': self._chart.series[0].setData( self.pieDataFromCollectionData( data, self._options.keyField, self._options.valField ), true, true ); break; case 'bar': case 'line': case 'area': case 'column': var seriesData = self.seriesDataFromCollectionData( self._options.seriesField, self._options.keyField, self._options.valField, self._options.orderBy, self._options ); if (seriesData.xAxis.categories) { self._chart.xAxis[0].setCategories( seriesData.xAxis.categories ); } for (i = 0; i < seriesData.series.length; i++) { if (self._chart.series[i]) { // Series exists, set it's data self._chart.series[i].setData( seriesData.series[i].data, true, true ); } else { // Series data does not yet exist, add a new series self._chart.addSeries( seriesData.series[i], true, true ); } } break; default: break; } } }; /** * Destroys the chart and all internal references. * @returns {Boolean} */ Highchart.prototype.drop = function (callback) { if (!this.isDropped()) { this._state = 'dropped'; if (this._chart) { this._chart.destroy(); } if (this._collection) { this._collection.off('change', this._changeListener); this._collection.off('drop', this.drop); if (this._collection._highcharts) { delete this._collection._highcharts[this._options.selector]; } } delete this._chart; delete this._options; delete this._collection; this.emit('drop', this); if (callback) { callback(false, true); } delete this._listeners; return true; } else { return true; } }; // Extend collection with highchart init Collection.prototype.init = function () { this._highcharts = {}; CollectionInit.apply(this, arguments); }; /** * Creates a pie chart from the collection. * @type {Overload} */ Collection.prototype.pieChart = new Overload({ /** * Chart via options object. 
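 *
 * A minimal sketch, assuming Highcharts is loaded and '#pieChart' exists in
 * the page; field names are illustrative:
 * @example
 * db.collection('sales').pieChart({
 *     selector: '#pieChart',
 *     keyField: 'product',  // becomes the slice name
 *     valField: 'total',    // becomes the slice value
 *     seriesName: 'Sales'
 * });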
* @func pieChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'pie'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'pie'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. * @func pieChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {String} seriesName The name of the series to display on the chart. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, keyField, valField, seriesName, options) { options = options || {}; options.selector = selector; options.keyField = keyField; options.valField = valField; options.seriesName = seriesName; // Call the main chart method this.pieChart(options); } }); /** * Creates a line chart from the collection. * @type {Overload} */ Collection.prototype.lineChart = new Overload({ /** * Chart via selector. * @func lineChart * @memberof Highchart * @param {String} selector The chart selector. * @returns {*} */ 'string': function (selector) { return this._highcharts[selector]; }, /** * Chart via options object. * @func lineChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'line'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'line'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. * @func lineChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} seriesField The name of the series to plot. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, seriesField, keyField, valField, options) { options = options || {}; options.seriesField = seriesField; options.selector = selector; options.keyField = keyField; options.valField = valField; // Call the main chart method this.lineChart(options); } }); /** * Creates an area chart from the collection. * @type {Overload} */ Collection.prototype.areaChart = new Overload({ /** * Chart via options object. * @func areaChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'area'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'area'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. 
* @func areaChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} seriesField The name of the series to plot. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, seriesField, keyField, valField, options) { options = options || {}; options.seriesField = seriesField; options.selector = selector; options.keyField = keyField; options.valField = valField; // Call the main chart method this.areaChart(options); } }); /** * Creates a column chart from the collection. * @type {Overload} */ Collection.prototype.columnChart = new Overload({ /** * Chart via options object. * @func columnChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'column'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'column'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. * @func columnChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} seriesField The name of the series to plot. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, seriesField, keyField, valField, options) { options = options || {}; options.seriesField = seriesField; options.selector = selector; options.keyField = keyField; options.valField = valField; // Call the main chart method this.columnChart(options); } }); /** * Creates a bar chart from the collection. * @type {Overload} */ Collection.prototype.barChart = new Overload({ /** * Chart via options object. * @func barChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'bar'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'bar'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. * @func barChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} seriesField The name of the series to plot. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, seriesField, keyField, valField, options) { options = options || {}; options.seriesField = seriesField; options.selector = selector; options.keyField = keyField; options.valField = valField; // Call the main chart method this.barChart(options); } }); /** * Creates a stacked bar chart from the collection. * @type {Overload} */ Collection.prototype.stackedBarChart = new Overload({ /** * Chart via options object. 
* @func stackedBarChart * @memberof Highchart * @param {Object} options The options object. * @returns {*} */ 'object': function (options) { options.type = 'bar'; options.chartOptions = options.chartOptions || {}; options.chartOptions.chart = options.chartOptions.chart || {}; options.chartOptions.chart.type = 'bar'; options.plotOptions = options.plotOptions || {}; options.plotOptions.series = options.plotOptions.series || {}; options.plotOptions.series.stacking = options.plotOptions.series.stacking || 'normal'; if (!this._highcharts[options.selector]) { // Store new chart in charts array this._highcharts[options.selector] = new Highchart(this, options); } return this._highcharts[options.selector]; }, /** * Chart via defined params and an options object. * @func stackedBarChart * @memberof Highchart * @param {String|jQuery} selector The element to render the chart to. * @param {String} seriesField The name of the series to plot. * @param {String} keyField The field to use as the data key. * @param {String} valField The field to use as the data value. * @param {Object} options The options object. */ '*, string, string, string, ...': function (selector, seriesField, keyField, valField, options) { options = options || {}; options.seriesField = seriesField; options.selector = selector; options.keyField = keyField; options.valField = valField; // Call the main chart method this.stackedBarChart(options); } }); /** * Removes a chart from the page by it's selector. * @memberof Collection * @param {String} selector The chart selector. */ Collection.prototype.dropChart = function (selector) { if (this._highcharts && this._highcharts[selector]) { this._highcharts[selector].drop(); } }; Shared.finishModule('Highchart'); module.exports = Highchart; },{"./Overload":32,"./Shared":40}],15:[function(_dereq_,module,exports){ "use strict"; /* name(string) id(string) rebuild(null) state ?? needed? match(query, options) lookup(query, options) insert(doc) remove(doc) primaryKey(string) collection(collection) */ var Shared = _dereq_('./Shared'), Path = _dereq_('./Path'), BinaryTree = _dereq_('./BinaryTree'), GeoHash = _dereq_('./GeoHash'), sharedPathSolver = new Path(), sharedGeoHashSolver = new GeoHash(), // GeoHash Distances in Kilometers geoHashDistance = [ 5000, 1250, 156, 39.1, 4.89, 1.22, 0.153, 0.0382, 0.00477, 0.00119, 0.000149, 0.0000372 ]; /** * The index class used to instantiate 2d indexes that the database can * use to handle high-performance geospatial queries. * @constructor */ var Index2d = function () { this.init.apply(this, arguments); }; /** * Create the index. * @param {Object} keys The object with the keys that the user wishes the index * to operate on. * @param {Object} options Can be undefined, if passed is an object with arbitrary * options keys and values. * @param {Collection} collection The collection the index should be created for. */ Index2d.prototype.init = function (keys, options, collection) { this._btree = new BinaryTree(); this._btree.index(keys); this._size = 0; this._id = this._itemKeyHash(keys, keys); this._debug = options && options.debug ? options.debug : false; this.unique(options && options.unique ? options.unique : false); if (keys !== undefined) { this.keys(keys); } if (collection !== undefined) { this.collection(collection); this._btree.primaryKey(collection.primaryKey()); } this.name(options && options.name ? 
options.name : this._id); this._btree.debug(this._debug); }; Shared.addModule('Index2d', Index2d); Shared.mixin(Index2d.prototype, 'Mixin.Common'); Shared.mixin(Index2d.prototype, 'Mixin.ChainReactor'); Shared.mixin(Index2d.prototype, 'Mixin.Sorting'); Index2d.prototype.id = function () { return this._id; }; Index2d.prototype.state = function () { return this._state; }; Index2d.prototype.size = function () { return this._size; }; Shared.synthesize(Index2d.prototype, 'data'); Shared.synthesize(Index2d.prototype, 'name'); Shared.synthesize(Index2d.prototype, 'collection'); Shared.synthesize(Index2d.prototype, 'type'); Shared.synthesize(Index2d.prototype, 'unique'); Index2d.prototype.keys = function (val) { if (val !== undefined) { this._keys = val; // Count the keys this._keyCount = sharedPathSolver.parse(this._keys).length; return this; } return this._keys; }; Index2d.prototype.rebuild = function () { // Do we have a collection? if (this._collection) { // Get sorted data var collection = this._collection.subset({}, { $decouple: false, $orderBy: this._keys }), collectionData = collection.find(), dataIndex, dataCount = collectionData.length; // Clear the index data for the index this._btree.clear(); this._size = 0; if (this._unique) { this._uniqueLookup = {}; } // Loop the collection data for (dataIndex = 0; dataIndex < dataCount; dataIndex++) { this.insert(collectionData[dataIndex]); } } this._state = { name: this._name, keys: this._keys, indexSize: this._size, built: new Date(), updated: new Date(), ok: true }; }; Index2d.prototype.insert = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash; dataItem = this.decouple(dataItem); if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); this._uniqueLookup[uniqueHash] = dataItem; } // Convert 2d indexed values to geohashes var keys = this._btree.keys(), pathVal, geoHash, lng, lat, i; for (i = 0; i < keys.length; i++) { pathVal = sharedPathSolver.get(dataItem, keys[i].path); if (pathVal instanceof Array) { lng = pathVal[0]; lat = pathVal[1]; geoHash = sharedGeoHashSolver.encode(lng, lat); sharedPathSolver.set(dataItem, keys[i].path, geoHash); } } if (this._btree.insert(dataItem)) { this._size++; return true; } return false; }; Index2d.prototype.remove = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash; if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); delete this._uniqueLookup[uniqueHash]; } if (this._btree.remove(dataItem)) { this._size--; return true; } return false; }; Index2d.prototype.violation = function (dataItem) { // Generate item hash var uniqueHash = this._itemHash(dataItem, this._keys); // Check if the item breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; Index2d.prototype.hashViolation = function (uniqueHash) { // Check if the item breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; /** * Looks up records that match the passed query and options. * @param query The query to execute. * @param options A query options object. * @param {Operation=} op Optional operation instance that allows * us to provide operation diagnostics and analytics back to the * main calling instance as the process is running. 
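 *
 * A minimal sketch of the $near operator this index resolves, assuming the
 * collection has a 2d index on an illustrative 'location' field:
 * @example
 * coll.find({
 *     location: {
 *         $near: {
 *             $point: [51.50722, -0.12750],   // [latitude, longitude]
 *             $maxDistance: 0.5,
 *             $distanceUnits: 'km',           // or 'miles'
 *             $distanceField: 'dist'          // optional: writes the distance onto each result
 *         }
 *     }
 * });
 * // $geoWithin is also recognised but is currently a stub that returns an empty array.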
* @returns {*} */ Index2d.prototype.lookup = function (query, options, op) { // Loop the indexed keys and determine if the query has any operators // that we want to handle differently from a standard lookup var keys = this._btree.keys(), pathStr, pathVal, results, i; for (i = 0; i < keys.length; i++) { pathStr = keys[i].path; pathVal = sharedPathSolver.get(query, pathStr); if (typeof pathVal === 'object') { if (pathVal.$near) { results = []; // Do a near point lookup results = results.concat(this.near(pathStr, pathVal.$near, options, op)); } if (pathVal.$geoWithin) { results = []; // Do a geoWithin shape lookup results = results.concat(this.geoWithin(pathStr, pathVal.$geoWithin, options, op)); } return results; } } return this._btree.lookup(query, options); }; Index2d.prototype.near = function (pathStr, query, options, op) { var self = this, neighbours, visitedCount, visitedNodes, visitedData, search, results, finalResults = [], precision, maxDistanceKm, distance, distCache, latLng, pk = this._collection.primaryKey(), i; // Calculate the required precision to encapsulate the distance // TODO: Instead of opting for the "one size larger" than the distance boxes, // TODO: we should calculate closest divisible box size as a multiple and then // TODO: scan neighbours until we have covered the area otherwise we risk // TODO: opening the results up to vastly more information as the box size // TODO: increases dramatically between the geohash precisions if (query.$distanceUnits === 'km') { maxDistanceKm = query.$maxDistance; for (i = 0; i < geoHashDistance.length; i++) { if (maxDistanceKm > geoHashDistance[i]) { precision = i + 1; break; } } } else if (query.$distanceUnits === 'miles') { maxDistanceKm = query.$maxDistance * 1.60934; for (i = 0; i < geoHashDistance.length; i++) { if (maxDistanceKm > geoHashDistance[i]) { precision = i + 1; break; } } } if (precision === 0) { precision = 1; } // Calculate 9 box geohashes if (op) { op.time('index2d.calculateHashArea'); } neighbours = sharedGeoHashSolver.calculateHashArrayByRadius(query.$point, maxDistanceKm, precision); if (op) { op.time('index2d.calculateHashArea'); } if (op) { op.data('index2d.near.precision', precision); op.data('index2d.near.hashArea', neighbours); op.data('index2d.near.maxDistanceKm', maxDistanceKm); op.data('index2d.near.centerPointCoords', [query.$point[0], query.$point[1]]); } // Lookup all matching co-ordinates from the btree results = []; visitedCount = 0; visitedData = {}; visitedNodes = []; if (op) { op.time('index2d.near.getDocsInsideHashArea'); } for (i = 0; i < neighbours.length; i++) { search = this._btree.startsWith(pathStr, neighbours[i]); visitedData[neighbours[i]] = search; visitedCount += search._visitedCount; visitedNodes = visitedNodes.concat(search._visitedNodes); results = results.concat(search); } if (op) { op.time('index2d.near.getDocsInsideHashArea'); op.data('index2d.near.startsWith', visitedData); op.data('index2d.near.visitedTreeNodes', visitedNodes); } // Work with original data if (op) { op.time('index2d.near.lookupDocsById'); } results = this._collection._primaryIndex.lookup(results); if (op) { op.time('index2d.near.lookupDocsById'); } if (query.$distanceField) { // Decouple the results before we modify them results = this.decouple(results); } if (results.length) { distance = {}; if (op) { op.time('index2d.near.calculateDistanceFromCenter'); } // Loop the results and calculate distance for (i = 0; i < results.length; i++) { latLng = sharedPathSolver.get(results[i], pathStr); distCache = 
distance[results[i][pk]] = this.distanceBetweenPoints(query.$point[0], query.$point[1], latLng[0], latLng[1]); if (distCache <= maxDistanceKm) { if (query.$distanceField) { // Options specify a field to add the distance data to // so add it now sharedPathSolver.set(results[i], query.$distanceField, query.$distanceUnits === 'km' ? distCache : Math.round(distCache * 0.621371)); } if (query.$geoHashField) { // Options specify a field to add the distance data to // so add it now sharedPathSolver.set(results[i], query.$geoHashField, sharedGeoHashSolver.encode(latLng[0], latLng[1], precision)); } // Add item as it is inside radius distance finalResults.push(results[i]); } } if (op) { op.time('index2d.near.calculateDistanceFromCenter'); } // Sort by distance from center if (op) { op.time('index2d.near.sortResultsByDistance'); } finalResults.sort(function (a, b) { return self.sortAsc(distance[a[pk]], distance[b[pk]]); }); if (op) { op.time('index2d.near.sortResultsByDistance'); } } // Return data return finalResults; }; Index2d.prototype.geoWithin = function (pathStr, query, options) { console.log('geoWithin() is currently a prototype method with no actual implementation... it just returns a blank array.'); return []; }; Index2d.prototype.distanceBetweenPoints = function (lat1, lng1, lat2, lng2) { var R = 6371; // kilometres var lat1Rad = this.toRadians(lat1); var lat2Rad = this.toRadians(lat2); var lat2MinusLat1Rad = this.toRadians(lat2-lat1); var lng2MinusLng1Rad = this.toRadians(lng2-lng1); var a = Math.sin(lat2MinusLat1Rad/2) * Math.sin(lat2MinusLat1Rad/2) + Math.cos(lat1Rad) * Math.cos(lat2Rad) * Math.sin(lng2MinusLng1Rad/2) * Math.sin(lng2MinusLng1Rad/2); var c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a)); return R * c; }; Index2d.prototype.toRadians = function (degrees) { return degrees * 0.01747722222222; }; Index2d.prototype.match = function (query, options) { // TODO: work out how to represent that this is a better match if the query has $near than // TODO: a basic btree index which will not be able to resolve a $near operator return this._btree.match(query, options); }; Index2d.prototype._itemHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { hash += '_'; } hash += path.value(item, pathData[k].path).join(':'); } return hash; }; Index2d.prototype._itemKeyHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { hash += '_'; } hash += path.keyValue(item, pathData[k].path); } return hash; }; Index2d.prototype._itemHashArr = function (item, keys) { var path = new Path(), pathData, //hash = '', hashArr = [], valArr, i, k, j; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { valArr = path.value(item, pathData[k].path); for (i = 0; i < valArr.length; i++) { if (k === 0) { // Setup the initial hash array hashArr.push(valArr[i]); } else { // Loop the hash array and concat the value to it for (j = 0; j < hashArr.length; j++) { hashArr[j] = hashArr[j] + '_' + valArr[i]; } } } } return hashArr; }; // Register this index on the shared object Shared.index['2d'] = Index2d; Shared.finishModule('Index2d'); module.exports = Index2d; },{"./BinaryTree":5,"./GeoHash":12,"./Path":34,"./Shared":40}],16:[function(_dereq_,module,exports){ "use strict"; /* name(string) id(string) rebuild(null) state ?? needed? 
match(query, options) lookup(query, options) insert(doc) remove(doc) primaryKey(string) collection(collection) */ var Shared = _dereq_('./Shared'), Path = _dereq_('./Path'), BinaryTree = _dereq_('./BinaryTree'); /** * The index class used to instantiate btree indexes that the database can * use to speed up queries on collections and views. * @constructor */ var IndexBinaryTree = function () { this.init.apply(this, arguments); }; IndexBinaryTree.prototype.init = function (keys, options, collection) { this._btree = new BinaryTree(); this._btree.index(keys); this._size = 0; this._id = this._itemKeyHash(keys, keys); this._debug = options && options.debug ? options.debug : false; this.unique(options && options.unique ? options.unique : false); if (keys !== undefined) { this.keys(keys); } if (collection !== undefined) { this.collection(collection); this._btree.primaryKey(collection.primaryKey()); } this.name(options && options.name ? options.name : this._id); this._btree.debug(this._debug); }; Shared.addModule('IndexBinaryTree', IndexBinaryTree); Shared.mixin(IndexBinaryTree.prototype, 'Mixin.ChainReactor'); Shared.mixin(IndexBinaryTree.prototype, 'Mixin.Sorting'); IndexBinaryTree.prototype.id = function () { return this._id; }; IndexBinaryTree.prototype.state = function () { return this._state; }; IndexBinaryTree.prototype.size = function () { return this._size; }; Shared.synthesize(IndexBinaryTree.prototype, 'data'); Shared.synthesize(IndexBinaryTree.prototype, 'name'); Shared.synthesize(IndexBinaryTree.prototype, 'collection'); Shared.synthesize(IndexBinaryTree.prototype, 'type'); Shared.synthesize(IndexBinaryTree.prototype, 'unique'); IndexBinaryTree.prototype.keys = function (val) { if (val !== undefined) { this._keys = val; // Count the keys this._keyCount = (new Path()).parse(this._keys).length; return this; } return this._keys; }; IndexBinaryTree.prototype.rebuild = function () { // Do we have a collection? 
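	// Rebuild drops all existing btree entries and re-inserts every document
	// from the backing collection in index key order, then records fresh
	// state metadata (size, build and update timestamps).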
if (this._collection) { // Get sorted data var collection = this._collection.subset({}, { $decouple: false, $orderBy: this._keys }), collectionData = collection.find(), dataIndex, dataCount = collectionData.length; // Clear the index data for the index this._btree.clear(); this._size = 0; if (this._unique) { this._uniqueLookup = {}; } // Loop the collection data for (dataIndex = 0; dataIndex < dataCount; dataIndex++) { this.insert(collectionData[dataIndex]); } } this._state = { name: this._name, keys: this._keys, indexSize: this._size, built: new Date(), updated: new Date(), ok: true }; }; IndexBinaryTree.prototype.insert = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash; if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); this._uniqueLookup[uniqueHash] = dataItem; } if (this._btree.insert(dataItem)) { this._size++; return true; } return false; }; IndexBinaryTree.prototype.remove = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash; if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); delete this._uniqueLookup[uniqueHash]; } if (this._btree.remove(dataItem)) { this._size--; return true; } return false; }; IndexBinaryTree.prototype.violation = function (dataItem) { // Generate item hash var uniqueHash = this._itemHash(dataItem, this._keys); // Check if the item breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; IndexBinaryTree.prototype.hashViolation = function (uniqueHash) { // Check if the item breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; IndexBinaryTree.prototype.lookup = function (query, options, op) { return this._btree.lookup(query, options, op); }; IndexBinaryTree.prototype.match = function (query, options) { return this._btree.match(query, options); }; IndexBinaryTree.prototype._itemHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { hash += '_'; } hash += path.value(item, pathData[k].path).join(':'); } return hash; }; IndexBinaryTree.prototype._itemKeyHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { hash += '_'; } hash += path.keyValue(item, pathData[k].path); } return hash; }; IndexBinaryTree.prototype._itemHashArr = function (item, keys) { var path = new Path(), pathData, //hash = '', hashArr = [], valArr, i, k, j; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { valArr = path.value(item, pathData[k].path); for (i = 0; i < valArr.length; i++) { if (k === 0) { // Setup the initial hash array hashArr.push(valArr[i]); } else { // Loop the hash array and concat the value to it for (j = 0; j < hashArr.length; j++) { hashArr[j] = hashArr[j] + '_' + valArr[i]; } } } } return hashArr; }; // Register this index on the shared object Shared.index.btree = IndexBinaryTree; Shared.finishModule('IndexBinaryTree'); module.exports = IndexBinaryTree; },{"./BinaryTree":5,"./Path":34,"./Shared":40}],17:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), Path = _dereq_('./Path'); /** * The index class used to instantiate hash map indexes that the database can * use to speed up queries on collections and views. 
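 *
 * A minimal sketch, assuming Collection.ensureIndex() accepts an options.type
 * naming a registered index class (the hash map index is assumed here to
 * register as 'hashed' on the shared index lookup, by analogy with the
 * 'btree' and '2d' registrations):
 * @example
 * coll.ensureIndex({ email: 1 }, { type: 'hashed', unique: true });
 * coll.find({ email: 'sam@example.com' }); // exact-match lookups can resolve via the hash map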
* @constructor */ var IndexHashMap = function () { this.init.apply(this, arguments); }; IndexHashMap.prototype.init = function (keys, options, collection) { this._crossRef = {}; this._size = 0; this._id = this._itemKeyHash(keys, keys); this.data({}); this.unique(options && options.unique ? options.unique : false); if (keys !== undefined) { this.keys(keys); } if (collection !== undefined) { this.collection(collection); } this.name(options && options.name ? options.name : this._id); }; Shared.addModule('IndexHashMap', IndexHashMap); Shared.mixin(IndexHashMap.prototype, 'Mixin.ChainReactor'); IndexHashMap.prototype.id = function () { return this._id; }; IndexHashMap.prototype.state = function () { return this._state; }; IndexHashMap.prototype.size = function () { return this._size; }; Shared.synthesize(IndexHashMap.prototype, 'data'); Shared.synthesize(IndexHashMap.prototype, 'name'); Shared.synthesize(IndexHashMap.prototype, 'collection'); Shared.synthesize(IndexHashMap.prototype, 'type'); Shared.synthesize(IndexHashMap.prototype, 'unique'); IndexHashMap.prototype.keys = function (val) { if (val !== undefined) { this._keys = val; // Count the keys this._keyCount = (new Path()).parse(this._keys).length; return this; } return this._keys; }; IndexHashMap.prototype.rebuild = function () { // Do we have a collection? if (this._collection) { // Get sorted data var collection = this._collection.subset({}, { $decouple: false, $orderBy: this._keys }), collectionData = collection.find(), dataIndex, dataCount = collectionData.length; // Clear the index data for the index this._data = {}; this._size = 0; if (this._unique) { this._uniqueLookup = {}; } // Loop the collection data for (dataIndex = 0; dataIndex < dataCount; dataIndex++) { this.insert(collectionData[dataIndex]); } } this._state = { name: this._name, keys: this._keys, indexSize: this._size, built: new Date(), updated: new Date(), ok: true }; }; IndexHashMap.prototype.insert = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash, itemHashArr, hashIndex; if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); this._uniqueLookup[uniqueHash] = dataItem; } // Generate item hash itemHashArr = this._itemHashArr(dataItem, this._keys); // Get the path search results and store them for (hashIndex = 0; hashIndex < itemHashArr.length; hashIndex++) { this.pushToPathValue(itemHashArr[hashIndex], dataItem); } }; IndexHashMap.prototype.update = function (dataItem, options) { // TODO: Write updates to work // 1: Get uniqueHash for the dataItem primary key value (may need to generate a store for this) // 2: Remove the uniqueHash as it currently stands // 3: Generate a new uniqueHash for dataItem // 4: Insert the new uniqueHash }; IndexHashMap.prototype.remove = function (dataItem, options) { var uniqueFlag = this._unique, uniqueHash, itemHashArr, hashIndex; if (uniqueFlag) { uniqueHash = this._itemHash(dataItem, this._keys); delete this._uniqueLookup[uniqueHash]; } // Generate item hash itemHashArr = this._itemHashArr(dataItem, this._keys); // Get the path search results and store them for (hashIndex = 0; hashIndex < itemHashArr.length; hashIndex++) { this.pullFromPathValue(itemHashArr[hashIndex], dataItem); } }; IndexHashMap.prototype.violation = function (dataItem) { // Generate item hash var uniqueHash = this._itemHash(dataItem, this._keys); // Check if the item breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; IndexHashMap.prototype.hashViolation = function (uniqueHash) { // Check if the item 
breaks the unique constraint return Boolean(this._uniqueLookup[uniqueHash]); }; IndexHashMap.prototype.pushToPathValue = function (hash, obj) { var pathValArr = this._data[hash] = this._data[hash] || []; // Make sure we have not already indexed this object at this path/value if (pathValArr.indexOf(obj) === -1) { // Index the object pathValArr.push(obj); // Record the reference to this object in our index size this._size++; // Cross-reference this association for later lookup this.pushToCrossRef(obj, pathValArr); } }; IndexHashMap.prototype.pullFromPathValue = function (hash, obj) { var pathValArr = this._data[hash], indexOfObject; // Make sure we have already indexed this object at this path/value indexOfObject = pathValArr.indexOf(obj); if (indexOfObject > -1) { // Un-index the object pathValArr.splice(indexOfObject, 1); // Record the reference to this object in our index size this._size--; // Remove object cross-reference this.pullFromCrossRef(obj, pathValArr); } // Check if we should remove the path value array if (!pathValArr.length) { // Remove the array delete this._data[hash]; } }; IndexHashMap.prototype.pull = function (obj) { // Get all places the object has been used and remove them var id = obj[this._collection.primaryKey()], crossRefArr = this._crossRef[id], arrIndex, arrCount = crossRefArr.length, arrItem; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { arrItem = crossRefArr[arrIndex]; // Remove item from this index lookup array this._pullFromArray(arrItem, obj); } // Record the reference to this object in our index size this._size--; // Now remove the cross-reference entry for this object delete this._crossRef[id]; }; IndexHashMap.prototype._pullFromArray = function (arr, obj) { var arrCount = arr.length; while (arrCount--) { if (arr[arrCount] === obj) { arr.splice(arrCount, 1); } } }; IndexHashMap.prototype.pushToCrossRef = function (obj, pathValArr) { var id = obj[this._collection.primaryKey()], crObj; this._crossRef[id] = this._crossRef[id] || []; // Check if the cross-reference to the pathVal array already exists crObj = this._crossRef[id]; if (crObj.indexOf(pathValArr) === -1) { // Add the cross-reference crObj.push(pathValArr); } }; IndexHashMap.prototype.pullFromCrossRef = function (obj, pathValArr) { var id = obj[this._collection.primaryKey()]; delete this._crossRef[id]; }; IndexHashMap.prototype.lookup = function (query) { return this._data[this._itemHash(query, this._keys)] || []; }; IndexHashMap.prototype.match = function (query, options) { // Check if the passed query has data in the keys our index // operates on and if so, is the query sort matching our order var pathSolver = new Path(); var indexKeyArr = pathSolver.parseArr(this._keys), queryArr = pathSolver.parseArr(query), matchedKeys = [], matchedKeyCount = 0, i; // Loop the query array and check the order of keys against the // index key array to see if this index can be used for (i = 0; i < indexKeyArr.length; i++) { if (queryArr[i] === indexKeyArr[i]) { matchedKeyCount++; matchedKeys.push(queryArr[i]); } else { // Query match failed - this is a hash map index so partial key match won't work return { matchedKeys: [], totalKeyCount: queryArr.length, score: 0 }; } } return { matchedKeys: matchedKeys, totalKeyCount: queryArr.length, score: matchedKeyCount }; //return pathSolver.countObjectPaths(this._keys, query); }; IndexHashMap.prototype._itemHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { 
hash += '_'; } hash += path.value(item, pathData[k].path).join(':'); } return hash; }; IndexHashMap.prototype._itemKeyHash = function (item, keys) { var path = new Path(), pathData, hash = '', k; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { if (hash) { hash += '_'; } hash += path.keyValue(item, pathData[k].path); } return hash; }; IndexHashMap.prototype._itemHashArr = function (item, keys) { var path = new Path(), pathData, //hash = '', hashArr = [], valArr, i, k, j; pathData = path.parse(keys); for (k = 0; k < pathData.length; k++) { valArr = path.value(item, pathData[k].path); for (i = 0; i < valArr.length; i++) { if (k === 0) { // Setup the initial hash array hashArr.push(valArr[i]); } else { // Loop the hash array and concat the value to it for (j = 0; j < hashArr.length; j++) { hashArr[j] = hashArr[j] + '_' + valArr[i]; } } } } return hashArr; }; // Register this index on the shared object Shared.index.hashed = IndexHashMap; Shared.finishModule('IndexHashMap'); module.exports = IndexHashMap; },{"./Path":34,"./Shared":40}],18:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'); /** * The key value store class used when storing basic in-memory KV data, * and can be queried for quick retrieval. Mostly used for collection * primary key indexes and lookups. * @param {String=} name Optional KV store name. * @param {Object=} options Optional KV store options object. Currently * supports "primaryKey" as a string. * @constructor */ var KeyValueStore = function (name, options) { this.init.apply(this, arguments); }; KeyValueStore.prototype.init = function (name, options) { // Ensure we have options options = options || {}; // Set our internal data settings this._name = name; this._data = {}; this._primaryKey = options.primaryKey || '_id'; }; Shared.addModule('KeyValueStore', KeyValueStore); Shared.mixin(KeyValueStore.prototype, 'Mixin.ChainReactor'); /** * Get / set the name of the key/value store. * @param {String} val The name to set. * @returns {*} */ Shared.synthesize(KeyValueStore.prototype, 'name'); /** * Get / set the primary key. * @param {String} key The key to set. * @returns {*} */ KeyValueStore.prototype.primaryKey = function (key) { if (key !== undefined) { this._primaryKey = key; return this; } return this._primaryKey; }; /** * Removes all data from the store. * @returns {*} */ KeyValueStore.prototype.truncate = function () { this._data = {}; return this; }; /** * Sets data against a key in the store. * @param {String} key The key to set data for. * @param {*} value The value to assign to the key. * @returns {*} */ KeyValueStore.prototype.set = function (key, value) { this._data[key] = value ? value : true; return this; }; /** * Gets data stored for the passed key. * @param {String} key The key to get data for. * @returns {*} */ KeyValueStore.prototype.get = function (key) { return this._data[key]; }; /** * Get / set the primary key. * @param {*} val A lookup query. 
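 * A minimal usage sketch (illustrative only; the store name, keys and
 * documents are hypothetical):
 * @example
 * var kv = new KeyValueStore('pkIndex', {primaryKey: '_id'});
 * kv.set('1', {_id: '1', name: 'Alice'});
 * kv.set('2', {_id: '2', name: 'Bob'});
 * kv.lookup('1');        // [{_id: '1', name: 'Alice'}]
 * kv.lookup(['1', '2']); // both documents
 * kv.lookup({_id: '2'}); // [{_id: '2', name: 'Bob'}]
 * kv.lookup('missing');  // []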
* @returns {*} */ KeyValueStore.prototype.lookup = function (val) { var pk = this._primaryKey, valType = typeof val, arrIndex, arrCount, lookupItem, result = []; // Check for early exit conditions if (valType === 'string' || valType === 'number') { lookupItem = this.get(val); if (lookupItem !== undefined) { return [lookupItem]; } else { return []; } } else if (valType === 'object') { if (val instanceof Array) { // An array of primary keys, find all matches arrCount = val.length; result = []; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { lookupItem = this.lookup(val[arrIndex]); if (lookupItem) { if (lookupItem instanceof Array) { result = result.concat(lookupItem); } else { result.push(lookupItem); } } } return result; } else if (val[pk] !== undefined && val[pk] !== null) { return this.lookup(val[pk]); } } // COMMENTED AS CODE WILL NEVER BE REACHED // Complex lookup /*lookupData = this._lookupKeys(val); keys = lookupData.keys; negate = lookupData.negate; if (!negate) { // Loop keys and return values for (arrIndex = 0; arrIndex < keys.length; arrIndex++) { result.push(this.get(keys[arrIndex])); } } else { // Loop data and return non-matching keys for (arrIndex in this._data) { if (this._data.hasOwnProperty(arrIndex)) { if (keys.indexOf(arrIndex) === -1) { result.push(this.get(arrIndex)); } } } } return result;*/ }; // COMMENTED AS WE ARE NOT CURRENTLY PASSING COMPLEX QUERIES TO KEYVALUESTORE INDEXES /*KeyValueStore.prototype._lookupKeys = function (val) { var pk = this._primaryKey, valType = typeof val, arrIndex, arrCount, lookupItem, bool, result; if (valType === 'string' || valType === 'number') { return { keys: [val], negate: false }; } else if (valType === 'object') { if (val instanceof RegExp) { // Create new data result = []; for (arrIndex in this._data) { if (this._data.hasOwnProperty(arrIndex)) { if (val.test(arrIndex)) { result.push(arrIndex); } } } return { keys: result, negate: false }; } else if (val instanceof Array) { // An array of primary keys, find all matches arrCount = val.length; result = []; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { result = result.concat(this._lookupKeys(val[arrIndex]).keys); } return { keys: result, negate: false }; } else if (val.$in && (val.$in instanceof Array)) { return { keys: this._lookupKeys(val.$in).keys, negate: false }; } else if (val.$nin && (val.$nin instanceof Array)) { return { keys: this._lookupKeys(val.$nin).keys, negate: true }; } else if (val.$ne) { return { keys: this._lookupKeys(val.$ne, true).keys, negate: true }; } else if (val.$or && (val.$or instanceof Array)) { // Create new data result = []; for (arrIndex = 0; arrIndex < val.$or.length; arrIndex++) { result = result.concat(this._lookupKeys(val.$or[arrIndex]).keys); } return { keys: result, negate: false }; } else if (val[pk]) { return this._lookupKeys(val[pk]); } } };*/ /** * Removes data for the given key from the store. * @param {String} key The key to un-set. * @returns {*} */ KeyValueStore.prototype.unSet = function (key) { delete this._data[key]; return this; }; /** * Sets data for the give key in the store only where the given key * does not already have a value in the store. * @param {String} key The key to set data for. * @param {*} value The value to assign to the key. * @returns {Boolean} True if data was set or false if data already * exists for the key. 
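 * A minimal usage sketch (illustrative only; hypothetical key/value pairs):
 * @example
 * var kv = new KeyValueStore('pkIndex');
 * kv.uniqueSet('1', {_id: '1'}); // true - no value existed for key '1'
 * kv.uniqueSet('1', {_id: '2'}); // false - key '1' already has a value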
*/ KeyValueStore.prototype.uniqueSet = function (key, value) { if (this._data[key] === undefined) { this._data[key] = value; return true; } return false; }; Shared.finishModule('KeyValueStore'); module.exports = KeyValueStore; },{"./Shared":40}],19:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), Operation = _dereq_('./Operation'); /** * The metrics class used to store details about operations. * @constructor */ var Metrics = function () { this.init.apply(this, arguments); }; Metrics.prototype.init = function () { this._data = []; }; Shared.addModule('Metrics', Metrics); Shared.mixin(Metrics.prototype, 'Mixin.ChainReactor'); /** * Creates an operation within the metrics instance and if metrics * are currently enabled (by calling the start() method) the operation * is also stored in the metrics log. * @param {String} name The name of the operation. * @returns {Operation} */ Metrics.prototype.create = function (name) { var op = new Operation(name); if (this._enabled) { this._data.push(op); } return op; }; /** * Starts logging operations. * @returns {Metrics} */ Metrics.prototype.start = function () { this._enabled = true; return this; }; /** * Stops logging operations. * @returns {Metrics} */ Metrics.prototype.stop = function () { this._enabled = false; return this; }; /** * Clears all logged operations. * @returns {Metrics} */ Metrics.prototype.clear = function () { this._data = []; return this; }; /** * Returns an array of all logged operations. * @returns {Array} */ Metrics.prototype.list = function () { return this._data; }; Shared.finishModule('Metrics'); module.exports = Metrics; },{"./Operation":31,"./Shared":40}],20:[function(_dereq_,module,exports){ "use strict"; var CRUD = { preSetData: function () { }, postSetData: function () { } }; module.exports = CRUD; },{}],21:[function(_dereq_,module,exports){ "use strict"; /** * The chain reactor mixin, provides methods to the target object that allow chain * reaction events to propagate to the target and be handled, processed and passed * on down the chain. * @mixin */ var ChainReactor = { /** * Creates a chain link between the current reactor node and the passed * reactor node. Chain packets that are send by this reactor node will * then be propagated to the passed node for subsequent packets. * @param {*} obj The chain reactor node to link to. */ chain: function (obj) { if (this.debug && this.debug()) { if (obj._reactorIn && obj._reactorOut) { console.log(obj._reactorIn.logIdentifier() + ' Adding target "' + obj._reactorOut.instanceIdentifier() + '" to the chain reactor target list'); } else { console.log(this.logIdentifier() + ' Adding target "' + obj.instanceIdentifier() + '" to the chain reactor target list'); } } this._chain = this._chain || []; var index = this._chain.indexOf(obj); if (index === -1) { this._chain.push(obj); } }, /** * Removes a chain link between the current reactor node and the passed * reactor node. Chain packets sent from this reactor node will no longer * be received by the passed node. * @param {*} obj The chain reactor node to unlink from. 
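 * A minimal sketch (illustrative only; assumes "source" and "target" are two
 * instances that mix in Mixin.ChainReactor):
 * @example
 * source.chain(target);   // target now receives chain packets sent by source
 * source.unChain(target); // target no longer receives packets from source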
*/ unChain: function (obj) { if (this.debug && this.debug()) { if (obj._reactorIn && obj._reactorOut) { console.log(obj._reactorIn.logIdentifier() + ' Removing target "' + obj._reactorOut.instanceIdentifier() + '" from the chain reactor target list'); } else { console.log(this.logIdentifier() + ' Removing target "' + obj.instanceIdentifier() + '" from the chain reactor target list'); } } if (this._chain) { var index = this._chain.indexOf(obj); if (index > -1) { this._chain.splice(index, 1); } } }, /** * Gets / sets the flag that will enable / disable chain reactor sending * from this instance. Chain reactor sending is enabled by default on all * instances. * @param {Boolean} val True or false to enable or disable chain sending. * @returns {*} */ chainEnabled: function (val) { if (val !== undefined) { this._chainDisabled = !val; return this; } return !this._chainDisabled; }, /** * Determines if this chain reactor node has any listeners downstream. * @returns {Boolean} True if there are nodes downstream of this node. */ chainWillSend: function () { return Boolean(this._chain && !this._chainDisabled); }, /** * Sends a chain reactor packet downstream from this node to any of its * chained targets that were linked to this node via a call to chain(). * @param {String} type The type of chain reactor packet to send. This * can be any string but the receiving reactor nodes will not react to * it unless they recognise the string. Built-in strings include: "insert", * "update", "remove", "setData" and "debug". * @param {Object} data A data object that usually contains a key called * "dataSet" which is an array of items to work on, and can contain other * custom keys that help describe the operation. * @param {Object=} options An options object. Can also contain custom * key/value pairs that your custom chain reactor code can operate on. */ chainSend: function (type, data, options) { if (this._chain && !this._chainDisabled) { var arr = this._chain, arrItem, count = arr.length, index, dataCopy = this.decouple(data, count); for (index = 0; index < count; index++) { arrItem = arr[index]; if (!arrItem._state || (arrItem._state && !arrItem.isDropped())) { if (this.debug && this.debug()) { if (arrItem._reactorIn && arrItem._reactorOut) { console.log(arrItem._reactorIn.logIdentifier() + ' Sending data down the chain reactor pipe to "' + arrItem._reactorOut.instanceIdentifier() + '"'); } else { console.log(this.logIdentifier() + ' Sending data down the chain reactor pipe to "' + arrItem.instanceIdentifier() + '"'); } } if (arrItem.chainReceive) { arrItem.chainReceive(this, type, dataCopy[index], options); } } else { console.log('Reactor Data:', type, data, options); console.log('Reactor Node:', arrItem); throw('Chain reactor attempting to send data to target reactor node that is in a dropped state!'); } } } }, /** * Handles receiving a chain reactor message that was sent via the chainSend() * method. Creates the chain packet object and then allows it to be processed. * @param {Object} sender The node that is sending the packet. * @param {String} type The type of packet. * @param {Object} data The data related to the packet. * @param {Object=} options An options object. 
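 * A minimal sketch (illustrative only; assumes "node" mixes in
 * Mixin.ChainReactor and that assigning its internal _chainHandler directly
 * is acceptable for this node):
 * @example
 * node._chainHandler = function (chainPacket) {
 *     // chainPacket contains sender, type, data and options
 *     // Return true to cancel further propagation, false to let
 *     // chainReceive() pass the packet on down the chain
 *     return false;
 * };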
*/ chainReceive: function (sender, type, data, options) { var chainPacket = { sender: sender, type: type, data: data, options: options }, cancelPropagate = false; if (this.debug && this.debug()) { console.log(this.logIdentifier() + ' Received data from parent reactor node'); } // Check if we have a chain handler method if (this._chainHandler) { // Fire our internal handler cancelPropagate = this._chainHandler(chainPacket); } // Check if we were told to cancel further propagation if (!cancelPropagate) { // Propagate the message down the chain this.chainSend(chainPacket.type, chainPacket.data, chainPacket.options); } } }; module.exports = ChainReactor; },{}],22:[function(_dereq_,module,exports){ "use strict"; var idCounter = 0, Overload = _dereq_('./Overload'), Serialiser = _dereq_('./Serialiser'), Common, serialiser = new Serialiser(), crcTable; crcTable = (function () { var crcTable = [], c, n, k; for (n = 0; n < 256; n++) { c = n; for (k = 0; k < 8; k++) { c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); // jshint ignore:line } crcTable[n] = c; } return crcTable; }()); /** * Provides commonly used methods to most classes in ForerunnerDB. * @mixin */ Common = { // Expose the serialiser object so it can be extended with new data handlers. serialiser: serialiser, /** * Generates a JSON serialisation-compatible object instance. After the * instance has been passed through this method, it will be able to survive * a JSON.stringify() and JSON.parse() cycle and still end up as an * instance at the end. Further information about this process can be found * in the ForerunnerDB wiki at: https://github.com/Irrelon/ForerunnerDB/wiki/Serialiser-&-Performance-Benchmarks * @param {*} val The object instance such as "new Date()" or "new RegExp()". */ make: function (val) { // This is a conversion request, hand over to serialiser return serialiser.convert(val); }, /** * Gets / sets data in the item store. The store can be used to set and * retrieve data against a key. Useful for adding arbitrary key/value data * to a collection / view etc and retrieving it later. * @param {String|*} key The key under which to store the passed value or * retrieve the existing stored value. * @param {*=} val Optional value. If passed will overwrite the existing value * stored against the specified key if one currently exists. * @returns {*} */ store: function (key, val) { if (key !== undefined) { if (val !== undefined) { // Store the data this._store = this._store || {}; this._store[key] = val; return this; } if (this._store) { return this._store[key]; } } return undefined; }, /** * Removes a previously stored key/value pair from the item store, set previously * by using the store() method. * @param {String|*} key The key of the key/value pair to remove; * @returns {Common} Returns this for chaining. */ unStore: function (key) { if (key !== undefined) { delete this._store[key]; } return this; }, /** * Returns a non-referenced version of the passed object / array. * @param {Object} data The object or array to return as a non-referenced version. * @param {Number=} copies Optional number of copies to produce. If specified, the return * value will be an array of decoupled objects, each distinct from the other. 
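 * A minimal usage sketch (illustrative only; hypothetical data, and "item" is
 * assumed to be any instance that mixes in Mixin.Common):
 * @example
 * var original = {name: 'Alice', tags: ['a', 'b']},
 *     copy = item.decouple(original),      // one non-referenced copy
 *     copies = item.decouple(original, 3); // array of three distinct copies
 * copy.tags.push('c'); // does not affect original.tags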
* @returns {*} */ decouple: function (data, copies) { if (data !== undefined && data !== "") { if (!copies) { return this.jParse(this.jStringify(data)); } else { var i, json = this.jStringify(data), copyArr = []; for (i = 0; i < copies; i++) { copyArr.push(this.jParse(json)); } return copyArr; } } return undefined; }, /** * Parses and returns data from stringified version. * @param {String} data The stringified version of data to parse. * @returns {Object} The parsed JSON object from the data. */ jParse: function (data) { return JSON.parse(data, serialiser.reviver()); }, /** * Converts a JSON object into a stringified version. * @param {Object} data The data to stringify. * @returns {String} The stringified data. */ jStringify: JSON.stringify, /** * Generates a new 16-character hexadecimal unique ID or * generates a new 16-character hexadecimal ID based on * the passed string. Will always generate the same ID * for the same string. * @param {String=} str A string to generate the ID from. * @return {String} */ objectId: function (str) { var id, pow = Math.pow(10, 17); if (!str) { idCounter++; id = (idCounter + ( Math.random() * pow + Math.random() * pow + Math.random() * pow + Math.random() * pow )).toString(16); } else { var val = 0, count = str.length, i; for (i = 0; i < count; i++) { val += str.charCodeAt(i) * pow; } id = val.toString(16); } return id; }, /** * Generates a unique hash for the passed object. * @param {Object} obj The object to generate a hash for. * @returns {String} */ hash: function (obj) { return JSON.stringify(obj); }, /** * Gets / sets debug flag that can enable debug message output to the * console if required. * @param {Boolean} val The value to set debug flag to. * @return {Boolean} True if enabled, false otherwise. */ /** * Sets debug flag for a particular type that can enable debug message * output to the console if required. * @param {String} type The name of the debug type to set flag for. * @param {Boolean} val The value to set debug flag to. * @return {Boolean} True if enabled, false otherwise. */ debug: new Overload([ function () { return this._debug && this._debug.all; }, function (val) { if (val !== undefined) { if (typeof val === 'boolean') { this._debug = this._debug || {}; this._debug.all = val; this.chainSend('debug', this._debug); return this; } else { return (this._debug && this._debug[val]) || (this._db && this._db._debug && this._db._debug[val]) || (this._debug && this._debug.all); } } return this._debug && this._debug.all; }, function (type, val) { if (type !== undefined) { if (val !== undefined) { this._debug = this._debug || {}; this._debug[type] = val; this.chainSend('debug', this._debug); return this; } return (this._debug && this._debug[val]) || (this._db && this._db._debug && this._db._debug[type]); } return this._debug && this._debug.all; } ]), /** * Returns a string describing the class this instance is derived from. * @returns {String} */ classIdentifier: function () { return 'ForerunnerDB.' + this.className; }, /** * Returns a string describing the instance by it's class name and instance * object name. * @returns {String} The instance identifier. */ instanceIdentifier: function () { return '[' + this.className + ']' + this.name(); }, /** * Returns a string used to denote a console log against this instance, * consisting of the class identifier and instance identifier. * @returns {String} The log identifier. 
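 * A minimal sketch (illustrative only; the class and instance names are
 * hypothetical):
 * @example
 * // For an instance whose className is 'Collection' and whose name() is 'users':
 * instance.instanceIdentifier(); // '[Collection]users'
 * instance.logIdentifier();      // 'ForerunnerDB [Collection]users'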
*/ logIdentifier: function () { return 'ForerunnerDB ' + this.instanceIdentifier(); }, /** * Converts a query object with MongoDB dot notation syntax * to Forerunner's object notation syntax. * @param {Object} obj The object to convert. */ convertToFdb: function (obj) { var varName, splitArr, objCopy, i; for (i in obj) { if (obj.hasOwnProperty(i)) { objCopy = obj; if (i.indexOf('.') > -1) { // Replace .$ with a placeholder before splitting by . char i = i.replace('.$', '[|$|]'); splitArr = i.split('.'); while ((varName = splitArr.shift())) { // Replace placeholder back to original .$ varName = varName.replace('[|$|]', '.$'); if (splitArr.length) { objCopy[varName] = {}; } else { objCopy[varName] = obj[i]; } objCopy = objCopy[varName]; } delete obj[i]; } } } }, /** * Checks if the state is dropped. * @returns {boolean} True when dropped, false otherwise. */ isDropped: function () { return this._state === 'dropped'; }, /** * Registers a timed callback that will overwrite itself if * the same id is used within the timeout period. Useful * for de-bouncing fast-calls. * @param {String} id An ID for the call (use the same one * to debounce the same calls). * @param {Function} callback The callback method to call on * timeout. * @param {Number} timeout The timeout in milliseconds before * the callback is called. */ debounce: function (id, callback, timeout) { var self = this, newData; self._debounce = self._debounce || {}; if (self._debounce[id]) { // Clear timeout for this item clearTimeout(self._debounce[id].timeout); } newData = { callback: callback, timeout: setTimeout(function () { // Delete existing reference delete self._debounce[id]; // Call the callback callback(); }, timeout) }; // Save current data self._debounce[id] = newData; }, /** * Returns a checksum of a string. * @param {String} str The string to checksum. * @return {Number} The checksum generated. */ checksum: function (str) { var crc = 0 ^ (-1), // jshint ignore:line i; for (i = 0; i < str.length; i++) { crc = (crc >>> 8) ^ crcTable[(crc ^ str.charCodeAt(i)) & 0xFF]; // jshint ignore:line } return (crc ^ (-1)) >>> 0; // jshint ignore:line } }; module.exports = Common; },{"./Overload":32,"./Serialiser":39}],23:[function(_dereq_,module,exports){ "use strict"; /** * Provides some database constants. * @mixin */ var Constants = { TYPE_INSERT: 0, TYPE_UPDATE: 1, TYPE_REMOVE: 2, PHASE_BEFORE: 0, PHASE_AFTER: 1 }; module.exports = Constants; },{}],24:[function(_dereq_,module,exports){ "use strict"; var Overload = _dereq_('./Overload'); /** * Provides event emitter functionality including the methods: on, off, once, emit, deferEmit. * @mixin * @name Events */ var Events = { on: new Overload({ /** * Attach an event listener to the passed event. * @name on * @method Events.on * @param {String} event The name of the event to listen for. * @param {Function} listener The method to call when the event is fired. * @returns {*} */ 'string, function': function (event, listener) { this._listeners = this._listeners || {}; this._listeners[event] = this._listeners[event] || {}; this._listeners[event]['*'] = this._listeners[event]['*'] || []; this._listeners[event]['*'].push(listener); return this; }, /** * Attach an event listener to the passed event only if the passed * id matches the document id for the event being fired. * @name on * @method Events.on * @param {String} event The name of the event to listen for. * @param {*} id The document id to match against. * @param {Function} listener The method to call when the event is fired. 
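 * A minimal usage sketch (illustrative only; the event name, id and callback
 * are hypothetical and assume the instance emits arrays of documents keyed by
 * its primary key):
 * @example
 * instance.on('change', '1234', function (data) {
 *     // Only called when the emitted data contains a document whose
 *     // primary key equals '1234'
 * });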
* @returns {*} */ 'string, *, function': function (event, id, listener) { this._listeners = this._listeners || {}; this._listeners[event] = this._listeners[event] || {}; this._listeners[event][id] = this._listeners[event][id] || []; this._listeners[event][id].push(listener); return this; } }), once: new Overload({ /** * Attach an event listener to the passed event that will be called only once. * @name once * @method Events.once * @param {String} event The name of the event to listen for. * @param {Function} listener The method to call when the event is fired. * @returns {*} */ 'string, function': function (event, listener) { var self = this, fired = false, internalCallback = function () { if (!fired) { self.off(event, internalCallback); listener.apply(self, arguments); fired = true; } }; return this.on(event, internalCallback); }, /** * Attach an event listener to the passed event that will be called only once. * @name once * @method Events.once * @param {String} event The name of the event to listen for. * @param {String} id The document id to match against. * @param {Function} listener The method to call when the event is fired. * @returns {*} */ 'string, *, function': function (event, id, listener) { var self = this, fired = false, internalCallback = function () { if (!fired) { self.off(event, id, internalCallback); listener.apply(self, arguments); fired = true; } }; return this.on(event, id, internalCallback); } }), off: new Overload({ /** * Cancels all event listeners for the passed event. * @name off * @method Events.off * @param {String} event The name of the event. * @returns {*} */ 'string': function (event) { var self = this; if (this._emitting) { this._eventRemovalQueue = this._eventRemovalQueue || []; this._eventRemovalQueue.push(function () { self.off(event); }); } else { if (this._listeners && this._listeners[event] && event in this._listeners) { delete this._listeners[event]; } } return this; }, /** * Cancels the event listener for the passed event and listener function. * @name off * @method Events.off * @param {String} event The event to cancel listener for. * @param {Function} listener The event listener function used in the on() * or once() call to cancel. * @returns {*} */ 'string, function': function (event, listener) { var self = this, arr, index; if (this._emitting) { this._eventRemovalQueue = this._eventRemovalQueue || []; this._eventRemovalQueue.push(function () { self.off(event, listener); }); } else { if (typeof(listener) === 'string') { if (this._listeners && this._listeners[event] && this._listeners[event][listener]) { delete this._listeners[event][listener]; } } else { if (this._listeners && event in this._listeners) { arr = this._listeners[event]['*']; index = arr.indexOf(listener); if (index > -1) { arr.splice(index, 1); } } } } return this; }, /** * Cancels an event listener based on an event name, id and listener function. * @name off * @method Events.off * @param {String} event The event to cancel listener for. * @param {String} id The ID of the event to cancel listening for. * @param {Function} listener The event listener function used in the on() * or once() call to cancel. 
*/ 'string, *, function': function (event, id, listener) { var self = this; if (this._emitting) { this._eventRemovalQueue = this._eventRemovalQueue || []; this._eventRemovalQueue.push(function () { self.off(event, id, listener); }); } else { if (this._listeners && event in this._listeners && id in this.listeners[event]) { var arr = this._listeners[event][id], index = arr.indexOf(listener); if (index > -1) { arr.splice(index, 1); } } } }, /** * Cancels all listeners for an event based on the passed event name and id. * @name off * @method Events.off * @param {String} event The event name to cancel listeners for. * @param {*} id The ID to cancel all listeners for. */ 'string, *': function (event, id) { var self = this; if (this._emitting) { this._eventRemovalQueue = this._eventRemovalQueue || []; this._eventRemovalQueue.push(function () { self.off(event, id); }); } else { if (this._listeners && event in this._listeners && id in this._listeners[event]) { // Kill all listeners for this event id delete this._listeners[event][id]; } } } }), /** * Emit an event with data. * @name emit * @method Events.emit * @param {String} event The event to emit. * @param {*} data Data to emit with the event. * @returns {*} */ emit: function (event, data) { this._listeners = this._listeners || {}; this._emitting = true; if (event in this._listeners) { var arrIndex, arrCount, tmpFunc, arr, listenerIdArr, listenerIdCount, listenerIdIndex; // Handle global emit if (this._listeners[event]['*']) { arr = this._listeners[event]['*']; arrCount = arr.length; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { // Check we have a function to execute tmpFunc = arr[arrIndex]; if (typeof tmpFunc === 'function') { tmpFunc.apply(this, Array.prototype.slice.call(arguments, 1)); } } } // Handle individual emit if (data instanceof Array) { // Check if the array is an array of objects in the collection if (data[0] && data[0][this._primaryKey]) { // Loop the array and check for listeners against the primary key listenerIdArr = this._listeners[event]; arrCount = data.length; for (arrIndex = 0; arrIndex < arrCount; arrIndex++) { if (listenerIdArr[data[arrIndex][this._primaryKey]]) { // Emit for this id listenerIdCount = listenerIdArr[data[arrIndex][this._primaryKey]].length; for (listenerIdIndex = 0; listenerIdIndex < listenerIdCount; listenerIdIndex++) { tmpFunc = listenerIdArr[data[arrIndex][this._primaryKey]][listenerIdIndex]; if (typeof tmpFunc === 'function') { listenerIdArr[data[arrIndex][this._primaryKey]][listenerIdIndex].apply(this, Array.prototype.slice.call(arguments, 1)); } } } } } } } this._emitting = false; this._processRemovalQueue(); return this; }, /** * If events are cleared with the off() method while the event emitter is * actively processing any events then the off() calls get added to a * queue to be executed after the event emitter is finished. This stops * errors that might occur by potentially modifying the event queue while * the emitter is running through them. This method is called after the * event emitter is finished processing. * @name _processRemovalQueue * @method Events._processRemovalQueue * @private */ _processRemovalQueue: function () { var i; if (this._eventRemovalQueue && this._eventRemovalQueue.length) { // Execute each removal call for (i = 0; i < this._eventRemovalQueue.length; i++) { this._eventRemovalQueue[i](); } // Clear the removal queue this._eventRemovalQueue = []; } }, /** * Queues an event to be fired. 
This has automatic de-bouncing so that any * events of the same type that occur within 100 milliseconds of a previous * one will all be wrapped into a single emit rather than emitting tons of * events for lots of chained inserts etc. Only the data from the last * de-bounced event will be emitted. * @name deferEmit * @method Events.deferEmit * @param {String} eventName The name of the event to emit. * @param {*=} data Optional data to emit with the event. */ deferEmit: function (eventName, data) { var self = this, args; if (!this._noEmitDefer && (!this._db || (this._db && !this._db._noEmitDefer))) { args = arguments; // Check for an existing timeout this._deferTimeout = this._deferTimeout || {}; if (this._deferTimeout[eventName]) { clearTimeout(this._deferTimeout[eventName]); } // Set a timeout this._deferTimeout[eventName] = setTimeout(function () { if (self.debug()) { console.log(self.logIdentifier() + ' Emitting ' + args[0]); } self.emit.apply(self, args); }, 1); } else { this.emit.apply(this, arguments); } return this; } }; module.exports = Events; },{"./Overload":32}],25:[function(_dereq_,module,exports){ "use strict"; /** * Provides object matching algorithm methods. * @mixin */ var Matching = { /** * Internal method that checks a document against a test object. * @param {*} source The source object or value to test against. * @param {*} test The test object or value to test with. * @param {Object} queryOptions The options the query was passed with. * @param {String=} opToApply The special operation to apply to the test such * as 'and' or an 'or' operator. * @param {Object=} options An object containing options to apply to the * operation such as limiting the fields returned etc. * @returns {Boolean} True if the test was positive, false on negative. 
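 * A minimal internal usage sketch (illustrative only; the documents and
 * queries are hypothetical):
 * @example
 * this._match({name: 'Alice', age: 30}, {age: {$gt: 20}}, {}, 'and', {}); // true
 * this._match({name: 'Alice', age: 30}, {name: 'Bob'}, {}, 'and', {});    // false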
* @private */ _match: function (source, test, queryOptions, opToApply, options) { // TODO: This method is quite long, break into smaller pieces var operation, applyOp = opToApply, recurseVal, tmpIndex, sourceType = typeof source, testType = typeof test, matchedAll = true, opResult, substringCache, i; if (sourceType === 'object' && source === null) { sourceType = 'null'; } if (testType === 'object' && test === null) { testType = 'null'; } options = options || {}; queryOptions = queryOptions || {}; // Check if options currently holds a root query object if (!options.$rootQuery) { // Root query not assigned, hold the root query options.$rootQuery = test; } // Check if options currently holds a root source object if (!options.$rootSource) { // Root query not assigned, hold the root query options.$rootSource = source; } // Assign current query data options.$currentQuery = test; options.$rootData = options.$rootData || {}; // Check if the comparison data are both strings or numbers if ((sourceType === 'string' || sourceType === 'number' || sourceType === 'null') && (testType === 'string' || testType === 'number' || testType === 'null')) { // The source and test data are flat types that do not require recursive searches, // so just compare them and return the result if (sourceType === 'number' || sourceType === 'null' || testType === 'null') { // Number or null comparison if (source !== test) { matchedAll = false; } } else { // String comparison // TODO: We can probably use a queryOptions.$locale as a second parameter here // TODO: to satisfy https://github.com/Irrelon/ForerunnerDB/issues/35 if (source.localeCompare(test)) { matchedAll = false; } } } else if ((sourceType === 'string' || sourceType === 'number') && (testType === 'object' && test instanceof RegExp)) { if (!test.test(source)) { matchedAll = false; } } else { for (i in test) { if (test.hasOwnProperty(i)) { // Assign previous query data options.$previousQuery = options.$parent; // Assign parent query data options.$parent = { query: test[i], key: i, parent: options.$previousQuery }; // Reset operation flag operation = false; // Grab first two chars of the key name to check for $ substringCache = i.substr(0, 2); // Check if the property is a comment (ignorable) if (substringCache === '//') { // Skip this property continue; } // Check if the property starts with a dollar (function) if (substringCache.indexOf('$') === 0) { // Ask the _matchOp method to handle the operation opResult = this._matchOp(i, source, test[i], queryOptions, options); // Check the result of the matchOp operation // If the result is -1 then no operation took place, otherwise the result // will be a boolean denoting a match (true) or no match (false) if (opResult > -1) { if (opResult) { if (opToApply === 'or') { return true; } } else { // Set the matchedAll flag to the result of the operation // because the operation did not return true matchedAll = opResult; } // Record that an operation was handled operation = true; } } // Check for regex if (!operation && test[i] instanceof RegExp) { operation = true; if (sourceType === 'object' && source[i] !== undefined && test[i].test(source[i])) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } if (!operation) { // Check if our query is an object if (typeof(test[i]) === 'object') { // Because test[i] is an object, source must also be an object // Check if our source data we are checking the test query against // is an object or an array if (source[i] !== undefined) { if (source[i] instanceof Array && 
!(test[i] instanceof Array)) { // The source data is an array, so check each item until a // match is found recurseVal = false; for (tmpIndex = 0; tmpIndex < source[i].length; tmpIndex++) { recurseVal = this._match(source[i][tmpIndex], test[i], queryOptions, applyOp, options); if (recurseVal) { // One of the array items matched the query so we can // include this item in the results, so break now break; } } if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } else if (!(source[i] instanceof Array) && test[i] instanceof Array) { // The test key data is an array and the source key data is not so check // each item in the test key data to see if the source item matches one // of them. This is effectively an $in search. recurseVal = false; for (tmpIndex = 0; tmpIndex < test[i].length; tmpIndex++) { recurseVal = this._match(source[i], test[i][tmpIndex], queryOptions, applyOp, options); if (recurseVal) { // One of the array items matched the query so we can // include this item in the results, so break now break; } } if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } else if (typeof(source) === 'object') { // Recurse down the object tree recurseVal = this._match(source[i], test[i], queryOptions, applyOp, options); if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } else { recurseVal = this._match(undefined, test[i], queryOptions, applyOp, options); if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } } else { // First check if the test match is an $exists if (test[i] && test[i].$exists !== undefined) { // Push the item through another match recurse recurseVal = this._match(undefined, test[i], queryOptions, applyOp, options); if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } else { matchedAll = false; } } } else { // Check if the prop matches our test value if (source && source[i] === test[i]) { if (opToApply === 'or') { return true; } } else if (source && source[i] && source[i] instanceof Array && test[i] && typeof(test[i]) !== "object") { // We are looking for a value inside an array // The source data is an array, so check each item until a // match is found recurseVal = false; for (tmpIndex = 0; tmpIndex < source[i].length; tmpIndex++) { recurseVal = this._match(source[i][tmpIndex], test[i], queryOptions, applyOp, options); if (recurseVal) { // One of the array items matched the query so we can // include this item in the results, so break now break; } } if (recurseVal) { if (opToApply === 'or') { return true; } } else { matchedAll = false; } } else { matchedAll = false; } } } if (opToApply === 'and' && !matchedAll) { return false; } } } } return matchedAll; }, /** * Internal method, performs a matching process against a query operator such as $gt or $nin. * @param {String} key The property name in the test that matches the operator to perform * matching against. * @param {*} source The source data to match the query against. * @param {*} test The query to match the source against. * @param {Object} queryOptions The options the query was passed with. * @param {Object=} options An options object. 
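 * A minimal internal usage sketch (illustrative only; hypothetical values):
 * @example
 * this._matchOp('$gt', 5, 3, {}, {});                // true  (5 > 3)
 * this._matchOp('$in', 'b', ['a', 'b'], {}, {});     // true  ('b' is in the array)
 * this._matchOp('$exists', undefined, true, {}, {}); // false (value is undefined)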
* @returns {*} * @private */ _matchOp: function (key, source, test, queryOptions, options) { // Check for commands switch (key) { case '$gt': // Greater than return source > test; case '$gte': // Greater than or equal return source >= test; case '$lt': // Less than return source < test; case '$lte': // Less than or equal return source <= test; case '$exists': // Property exists return (source === undefined) !== test; case '$eq': // Equals return source == test; // jshint ignore:line case '$eeq': // Equals equals return source === test; case '$ne': // Not equals return source != test; // jshint ignore:line case '$nee': // Not equals equals return source !== test; case '$not': // Not operator return !this._match(source, test, queryOptions, 'and', options); case '$or': // Match true on ANY check to pass for (var orIndex = 0; orIndex < test.length; orIndex++) { if (this._match(source, test[orIndex], queryOptions, 'and', options)) { return true; } } return false; case '$and': // Match true on ALL checks to pass for (var andIndex = 0; andIndex < test.length; andIndex++) { if (!this._match(source, test[andIndex], queryOptions, 'and', options)) { return false; } } return true; case '$in': // In // Check that the in test is an array if (test instanceof Array) { var inArr = test, inArrCount = inArr.length, inArrIndex; for (inArrIndex = 0; inArrIndex < inArrCount; inArrIndex++) { if (this._match(source, inArr[inArrIndex], queryOptions, 'and', options)) { return true; } } return false; } else if (typeof test === 'object') { return this._match(source, test, queryOptions, 'and', options); } else { console.log(this.logIdentifier() + ' Cannot use an $in operator on a non-array key: ' + key, options.$rootQuery); return false; } break; case '$nin': // Not in // Check that the not-in test is an array if (test instanceof Array) { var notInArr = test, notInArrCount = notInArr.length, notInArrIndex; for (notInArrIndex = 0; notInArrIndex < notInArrCount; notInArrIndex++) { if (this._match(source, notInArr[notInArrIndex], queryOptions, 'and', options)) { return false; } } return true; } else if (typeof test === 'object') { return this._match(source, test, queryOptions, 'and', options); } else { console.log(this.logIdentifier() + ' Cannot use a $nin operator on a non-array key: ' + key, options.$rootQuery); return false; } break; case '$fastIn': if (test instanceof Array) { // Source is a string or number, use indexOf to identify match in array return test.indexOf(source) !== -1; } else { console.log(this.logIdentifier() + ' Cannot use an $fastIn operator on a non-array key: ' + key, options.$rootQuery); return false; } break; case '$distinct': var lookupPath, value, finalDistinctProp; // Ensure options holds a distinct lookup options.$rootData['//distinctLookup'] = options.$rootData['//distinctLookup'] || {}; for (var distinctProp in test) { if (test.hasOwnProperty(distinctProp)) { if (typeof test[distinctProp] === 'object') { // Get the path string from the object lookupPath = this.sharedPathSolver.parse(test)[0].path; // Use the path string to find the lookup value from the source data value = this.sharedPathSolver.get(source, lookupPath); finalDistinctProp = lookupPath; } else { value = source[distinctProp]; finalDistinctProp = distinctProp; } options.$rootData['//distinctLookup'][finalDistinctProp] = options.$rootData['//distinctLookup'][finalDistinctProp] || {}; // Check if the options distinct lookup has this field's value if (options.$rootData['//distinctLookup'][finalDistinctProp][value]) { // Value is 
already in use return false; } else { // Set the value in the lookup options.$rootData['//distinctLookup'][finalDistinctProp][value] = true; // Allow the item in the results return true; } } } break; case '$count': var countKey, countArr, countVal; // Iterate the count object's keys for (countKey in test) { if (test.hasOwnProperty(countKey)) { // Check the property exists and is an array. If the property being counted is not // an array (or doesn't exist) then use a value of zero in any further count logic countArr = source[countKey]; if (typeof countArr === 'object' && countArr instanceof Array) { countVal = countArr.length; } else { countVal = 0; } // Now recurse down the query chain further to satisfy the query for this key (countKey) if (!this._match(countVal, test[countKey], queryOptions, 'and', options)) { return false; } } } // Allow the item in the results return true; case '$find': case '$findOne': case '$findSub': var fromType = 'collection', findQuery, findOptions, subQuery, subOptions, subPath, result, operation = {}; // Check all parts of the $find operation exist if (!test.$from) { throw(key + ' missing $from property!'); } if (test.$fromType) { fromType = test.$fromType; // Check the fromType exists as a method if (!this.db()[fromType] || typeof this.db()[fromType] !== 'function') { throw(key + ' cannot operate against $fromType "' + fromType + '" because the database does not recognise this type of object!'); } } // Perform the find operation findQuery = test.$query || {}; findOptions = test.$options || {}; if (key === '$findSub') { if (!test.$path) { throw(key + ' missing $path property!'); } subPath = test.$path; subQuery = test.$subQuery || {}; subOptions = test.$subOptions || {}; if (options.$parent && options.$parent.parent && options.$parent.parent.key) { result = this.db()[fromType](test.$from).findSub(findQuery, subPath, subQuery, subOptions); } else { // This is a root $find* query // Test the source against the main findQuery if (this._match(source, findQuery, {}, 'and', options)) { result = this._findSub([source], subPath, subQuery, subOptions); } return result && result.length > 0; } } else { result = this.db()[fromType](test.$from)[key.substr(1)](findQuery, findOptions); } operation[options.$parent.parent.key] = result; return this._match(source, operation, queryOptions, 'and', options); } return -1; }, /** * Performs a join operation and returns the final joined data. * @param {Array | Object} docArr An array of objects to run the join * operation against or a single object. * @param {Array} joinClause The join clause object array (the array in * the $join key of a normal join options object). * @param {Object} joinSource An object containing join source reference * data or a blank object if you are doing a bespoke join operation. * @param {Object} options An options object or blank object if no options. 
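 * A minimal internal usage sketch (illustrative only; the collection name,
 * field names and clause values are hypothetical, and "this" is assumed to be
 * an instance that mixes in Mixin.Matching with access to a database via
 * this._db):
 * @example
 * // Join each user document to its orders, storing matches under "orders"
 * var removeIndexes = this.applyJoin(userDocs, [{
 *     orders: {
 *         userId: '_id',    // match orders whose userId equals the user's _id
 *         $as: 'orders',    // key to store the joined data under
 *         $multi: true,     // keep an array of matches rather than the first
 *         $require: false   // keep users even when no orders match
 *     }
 * }], {}, {});
 * // Indexes are only queued for removal when $require is true and unmet
 * this.spliceArrayByIndexList(userDocs, removeIndexes);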
* @returns {Array} * @private */ applyJoin: function (docArr, joinClause, joinSource, options) { var self = this, joinSourceIndex, joinSourceKey, joinMatch, joinSourceType, joinSourceIdentifier, resultKeyName, joinSourceInstance, resultIndex, joinSearchQuery, joinMulti, joinRequire, joinPrefix, joinMatchIndex, joinMatchData, joinSearchOptions, joinFindResults, joinFindResult, joinItem, resultRemove = [], l; if (!(docArr instanceof Array)) { // Turn the document into an array docArr = [docArr]; } for (joinSourceIndex = 0; joinSourceIndex < joinClause.length; joinSourceIndex++) { for (joinSourceKey in joinClause[joinSourceIndex]) { if (joinClause[joinSourceIndex].hasOwnProperty(joinSourceKey)) { // Get the match data for the join joinMatch = joinClause[joinSourceIndex][joinSourceKey]; // Check if the join is to a collection (default) or a specified source type // e.g 'view' or 'collection' joinSourceType = joinMatch.$sourceType || 'collection'; joinSourceIdentifier = '$' + joinSourceType + '.' + joinSourceKey; // Set the key to store the join result in to the collection name by default // can be overridden by the '$as' clause in the join object resultKeyName = joinSourceKey; // Get the join collection instance from the DB if (joinSource[joinSourceIdentifier]) { // We have a joinSource for this identifier already (given to us by // an index when we analysed the query earlier on) and we can use // that source instead. joinSourceInstance = joinSource[joinSourceIdentifier]; } else { // We do not already have a joinSource so grab the instance from the db if (this._db[joinSourceType] && typeof this._db[joinSourceType] === 'function') { joinSourceInstance = this._db[joinSourceType](joinSourceKey); } } // Loop our result data array for (resultIndex = 0; resultIndex < docArr.length; resultIndex++) { // Loop the join conditions and build a search object from them joinSearchQuery = {}; joinMulti = false; joinRequire = false; joinPrefix = ''; for (joinMatchIndex in joinMatch) { if (joinMatch.hasOwnProperty(joinMatchIndex)) { joinMatchData = joinMatch[joinMatchIndex]; // Check the join condition name for a special command operator if (joinMatchIndex.substr(0, 1) === '$') { // Special command switch (joinMatchIndex) { case '$where': if (joinMatchData.$query || joinMatchData.$options) { if (joinMatchData.$query) { // Commented old code here, new one does dynamic reverse lookups //joinSearchQuery = joinMatchData.query; joinSearchQuery = self.resolveDynamicQuery(joinMatchData.$query, docArr[resultIndex]); } if (joinMatchData.$options) { joinSearchOptions = joinMatchData.$options; } } else { throw('$join $where clause requires "$query" and / or "$options" keys to work!'); } break; case '$as': // Rename the collection when stored in the result document resultKeyName = joinMatchData; break; case '$multi': // Return an array of documents instead of a single matching document joinMulti = joinMatchData; break; case '$require': // Remove the result item if no matching join data is found joinRequire = joinMatchData; break; case '$prefix': // Add a prefix to properties mixed in joinPrefix = joinMatchData; break; default: break; } } else { // Get the data to match against and store in the search object // Resolve complex referenced query joinSearchQuery[joinMatchIndex] = self.resolveDynamicQuery(joinMatchData, docArr[resultIndex]); } } } // Do a find on the target collection against the match data joinFindResults = joinSourceInstance.find(joinSearchQuery, joinSearchOptions); // Check if we require a joined row to 
allow the result item if (!joinRequire || (joinRequire && joinFindResults[0])) { // Join is not required or condition is met if (resultKeyName === '$root') { // The property name to store the join results in is $root // which means we need to mixin the results but this only // works if joinMulti is disabled if (joinMulti !== false) { // Throw an exception here as this join is not physically possible! throw(this.logIdentifier() + ' Cannot combine [$as: "$root"] with [$multi: true] in $join clause!'); } // Mixin the result joinFindResult = joinFindResults[0]; joinItem = docArr[resultIndex]; for (l in joinFindResult) { if (joinFindResult.hasOwnProperty(l) && joinItem[joinPrefix + l] === undefined) { // Properties are only mixed in if they do not already exist // in the target item (are undefined). Using a prefix denoted via // $prefix is a good way to prevent property name conflicts joinItem[joinPrefix + l] = joinFindResult[l]; } } } else { docArr[resultIndex][resultKeyName] = joinMulti === false ? joinFindResults[0] : joinFindResults; } } else { // Join required but condition not met, add item to removal queue resultRemove.push(resultIndex); } } } } } return resultRemove; }, /** * Takes a query object with dynamic references and converts the references * into actual values from the references source. * @param {Object} query The query object with dynamic references. * @param {Object} item The document to apply the references to. * @returns {*} * @private */ resolveDynamicQuery: function (query, item) { var self = this, newQuery, propType, propVal, pathResult, i; // Check for early exit conditions if (typeof query === 'string') { // Check if the property name starts with a back-reference if (query.substr(0, 3) === '$$.') { // Fill the query with a back-referenced value pathResult = this.sharedPathSolver.value(item, query.substr(3, query.length - 3)); } else { pathResult = this.sharedPathSolver.value(item, query); } if (pathResult.length > 1) { return {$in: pathResult}; } else { return pathResult[0]; } } newQuery = {}; for (i in query) { if (query.hasOwnProperty(i)) { propType = typeof query[i]; propVal = query[i]; switch (propType) { case 'string': // Check if the property name starts with a back-reference if (propVal.substr(0, 3) === '$$.') { // Fill the query with a back-referenced value newQuery[i] = this.sharedPathSolver.value(item, propVal.substr(3, propVal.length - 3))[0]; } else { newQuery[i] = propVal; } break; case 'object': newQuery[i] = self.resolveDynamicQuery(propVal, item); break; default: newQuery[i] = propVal; break; } } } return newQuery; }, spliceArrayByIndexList: function (arr, list) { var i; for (i = list.length - 1; i >= 0; i--) { arr.splice(list[i], 1); } } }; module.exports = Matching; },{}],26:[function(_dereq_,module,exports){ "use strict"; /** * Provides sorting methods. * @mixin */ var Sorting = { /** * Sorts the passed value a against the passed value b ascending. * @param {*} a The first value to compare. * @param {*} b The second value to compare. * @returns {*} 1 if a is sorted after b, -1 if a is sorted before b. */ sortAsc: function mixinSortingSortAsc (a, b) { if (typeof(a) === 'string' && typeof(b) === 'string') { return a.localeCompare(b); } else { if (a > b) { return 1; } else if (a < b) { return -1; } } if (a === undefined && b !== undefined) { return -1; } if (b === undefined && a !== undefined) { return 1; } return 0; }, /** * Sorts the passed value a against the passed value b descending. * @param {*} a The first value to compare. 
* @param {*} b The second value to compare. * @returns {*} 1 if a is sorted after b, -1 if a is sorted before b. */ sortDesc: function mixinSortingSortDesc (a, b) { if (typeof(a) === 'string' && typeof(b) === 'string') { return b.localeCompare(a); } else { if (a > b) { return -1; } else if (a < b) { return 1; } } if (a === undefined && b !== undefined) { return 1; } if (b === undefined && a !== undefined) { return -1; } return 0; }, /** * Sorts the passed value a against the passed value b ascending. This variant * of the sortAsc method will not consider undefined values as lower than any * other value. * @param {*} a The first value to compare. * @param {*} b The second value to compare. * @returns {*} 1 if a is sorted after b, -1 if a is sorted before b. */ sortAscIgnoreUndefined: function (a, b) { if (typeof(a) === 'string' && typeof(b) === 'string') { return a.localeCompare(b); } else { if (a > b) { return 1; } else if (a < b) { return -1; } } return 0; }, /** * Sorts the passed value a against the passed value b descending. This variant * of the sortDesc method will not consider undefined values as lower than any * other value. * @param {*} a The first value to compare. * @param {*} b The second value to compare. * @returns {*} 1 if a is sorted after b, -1 if a is sorted before b. */ sortDescIgnoreUndefined: function (a, b) { if (typeof(a) === 'string' && typeof(b) === 'string') { return b.localeCompare(a); } else { if (a > b) { return -1; } else if (a < b) { return 1; } } return 0; } }; module.exports = Sorting; },{}],27:[function(_dereq_,module,exports){ "use strict"; var Tags, tagMap = {}; /** * Provides class instance tagging and tag operation methods. * @mixin */ Tags = { /** * Tags a class instance for later lookup. * @param {String} name The tag to add. * @returns {boolean} */ tagAdd: function (name) { var i, self = this, mapArr = tagMap[name] = tagMap[name] || []; for (i = 0; i < mapArr.length; i++) { if (mapArr[i] === self) { return true; } } mapArr.push(self); // Hook the drop event for this so we can react if (self.on) { self.on('drop', function () { // We've been dropped so remove ourselves from the tag map self.tagRemove(name); }); } return true; }, /** * Removes a tag from a class instance. * @param {String} name The tag to remove. * @returns {boolean} */ tagRemove: function (name) { var i, mapArr = tagMap[name]; if (mapArr) { for (i = 0; i < mapArr.length; i++) { if (mapArr[i] === this) { mapArr.splice(i, 1); return true; } } } return false; }, /** * Gets an array of all instances tagged with the passed tag name. * @param {String} name The tag to lookup. * @returns {Array} The array of instances that have the passed tag. */ tagLookup: function (name) { return tagMap[name] || []; }, /** * Drops all instances that are tagged with the passed tag name. * @param {String} name The tag to lookup. * @param {Function} callback Callback once dropping has completed * for all instances that match the passed tag name. * @returns {boolean} */ tagDrop: function (name, callback) { var arr = this.tagLookup(name), dropCb, dropCount, i; dropCb = function () { dropCount--; if (callback && dropCount === 0) { callback(false); } }; if (arr.length) { dropCount = arr.length; // Loop the array and drop all items for (i = arr.length - 1; i >= 0; i--) { arr[i].drop(dropCb); } } return true; } }; module.exports = Tags; },{}],28:[function(_dereq_,module,exports){ "use strict"; var Overload = _dereq_('./Overload'); /** * Provides trigger functionality methods. 
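*
* A usage sketch (illustrative only; assumes a `db` instance exists, that this
* mixin is applied to the collection class as in the default build, and that
* the instance also carries the Mixin.Constants values such as TYPE_INSERT
* and PHASE_BEFORE referenced by processTrigger below):
* @example
* var users = db.collection('users');
*
* users.addTrigger('lowercaseEmail', users.TYPE_INSERT, users.PHASE_BEFORE, function (operation, oldDoc, newDoc) {
*     // In the "before" phase the new document can be altered directly
*     if (newDoc.email) { newDoc.email = newDoc.email.toLowerCase(); }
* });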
* @mixin */ var Triggers = { /** * Add a trigger by id, type and phase. * @name addTrigger * @method Triggers.addTrigger * @param {String} id The id of the trigger. This must be unique to the type and * phase of the trigger. Only one trigger may be added with this id per type and * phase. * @param {Constants} type The type of operation to apply the trigger to. See * Mixin.Constants for constants to use. * @param {Constants} phase The phase of an operation to fire the trigger on. See * Mixin.Constants for constants to use. * @param {Triggers.addTriggerCallback} method The method to call when the trigger is fired. * @returns {boolean} True if the trigger was added successfully, false if not. */ addTrigger: function (id, type, phase, method) { var self = this, triggerIndex; // Check if the trigger already exists triggerIndex = self._triggerIndexOf(id, type, phase); if (triggerIndex === -1) { self.triggerStack = {}; // The trigger does not exist, create it self._trigger = self._trigger || {}; self._trigger[type] = self._trigger[type] || {}; self._trigger[type][phase] = self._trigger[type][phase] || []; self._trigger[type][phase].push({ id: id, method: method, enabled: true }); return true; } return false; }, /** * Removes a trigger by id, type and phase. * @name removeTrigger * @method Triggers.removeTrigger * @param {String} id The id of the trigger to remove. * @param {Number} type The type of operation to remove the trigger from. See * Mixin.Constants for constants to use. * @param {Number} phase The phase of the operation to remove the trigger from. * See Mixin.Constants for constants to use. * @returns {boolean} True if removed successfully, false if not. */ removeTrigger: function (id, type, phase) { var self = this, triggerIndex; // Check if the trigger already exists triggerIndex = self._triggerIndexOf(id, type, phase); if (triggerIndex > -1) { // The trigger exists, remove it self._trigger[type][phase].splice(triggerIndex, 1); } return false; }, /** * Tells the current instance to fire or ignore all triggers whether they * are enabled or not. * @param {Boolean} val Set to true to ignore triggers or false to not * ignore them. * @returns {*} */ ignoreTriggers: function (val) { if (val !== undefined) { this._ignoreTriggers = val; return this; } return this._ignoreTriggers; }, /** * Generates triggers that fire in the after phase for all CRUD ops * that automatically transform data back and forth and keep both * import and export collections in sync with each other. * @param {String} id The unique id for this link IO. * @param {Object} ioData The settings for the link IO. 
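*
* A minimal sketch (illustrative; assumes collections "users" and "usersBackup"
* exist in the same database). The `data` function receives the new document
* and the operation type and must call back with (err, dataToWrite):
* @example
* db.collection('users').addLinkIO('usersToBackup', {
*     'export': {
*         to: 'usersBackup',
*         data: function (doc, type, callback) {
*             // Pass the document through unchanged
*             callback(false, doc);
*         }
*     }
* });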
*/ addLinkIO: function (id, ioData) { var self = this, matchAll, exportData, importData, exportTriggerMethod, importTriggerMethod, exportTo, importFrom, allTypes, i; // Store the linkIO self._linkIO = self._linkIO || {}; self._linkIO[id] = ioData; exportData = ioData['export']; importData = ioData['import']; if (exportData) { exportTo = self.db().collection(exportData.to); } if (importData) { importFrom = self.db().collection(importData.from); } allTypes = [ self.TYPE_INSERT, self.TYPE_UPDATE, self.TYPE_REMOVE ]; matchAll = function (data, callback) { // Match all callback(false, true); }; if (exportData) { // Check for export match method if (!exportData.match) { // No match method found, use the match all method exportData.match = matchAll; } // Check for export types if (!exportData.types) { exportData.types = allTypes; } exportTriggerMethod = function (operation, oldDoc, newDoc) { // Check if we should execute against this data exportData.match(newDoc, function (err, doExport) { if (!err && doExport) { // Get data to upsert (if any) exportData.data(newDoc, operation.type, function (err, data, callback) { if (!err && data) { // Disable all currently enabled triggers so that we // don't go into a trigger loop exportTo.ignoreTriggers(true); if (operation.type !== 'remove') { // Do upsert exportTo.upsert(data, callback); } else { // Do remove exportTo.remove(data, callback); } // Re-enable the previous triggers exportTo.ignoreTriggers(false); } }); } }); }; } if (importData) { // Check for import match method if (!importData.match) { // No match method found, use the match all method importData.match = matchAll; } // Check for import types if (!importData.types) { importData.types = allTypes; } importTriggerMethod = function (operation, oldDoc, newDoc) { // Check if we should execute against this data importData.match(newDoc, function (err, doExport) { if (!err && doExport) { // Get data to upsert (if any) importData.data(newDoc, operation.type, function (err, data, callback) { if (!err && data) { // Disable all currently enabled triggers so that we // don't go into a trigger loop exportTo.ignoreTriggers(true); if (operation.type !== 'remove') { // Do upsert self.upsert(data, callback); } else { // Do remove self.remove(data, callback); } // Re-enable the previous triggers exportTo.ignoreTriggers(false); } }); } }); }; } if (exportData) { for (i = 0; i < exportData.types.length; i++) { self.addTrigger(id + 'export' + exportData.types[i], exportData.types[i], self.PHASE_AFTER, exportTriggerMethod); } } if (importData) { for (i = 0; i < importData.types.length; i++) { importFrom.addTrigger(id + 'import' + importData.types[i], importData.types[i], self.PHASE_AFTER, importTriggerMethod); } } }, /** * Removes a previously added link IO via it's ID. * @param {String} id The id of the link IO to remove. * @returns {boolean} True if successful, false if the link IO * was not found. 
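*
* Illustrative example (removes the link IO added in the addLinkIO sketch above):
* @example
* db.collection('users').removeLinkIO('usersToBackup');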
*/ removeLinkIO: function (id) { var self = this, linkIO = self._linkIO[id], exportData, importData, importFrom, i; if (linkIO) { exportData = linkIO['export']; importData = linkIO['import']; if (exportData) { for (i = 0; i < exportData.types.length; i++) { self.removeTrigger(id + 'export' + exportData.types[i], exportData.types[i], self.db.PHASE_AFTER); } } if (importData) { importFrom = self.db().collection(importData.from); for (i = 0; i < importData.types.length; i++) { importFrom.removeTrigger(id + 'import' + importData.types[i], importData.types[i], self.db.PHASE_AFTER); } } delete self._linkIO[id]; return true; } return false; }, enableTrigger: new Overload({ 'string': function (id) { // Alter all triggers of this type var self = this, types = self._trigger, phases, triggers, result = false, i, k, j; if (types) { for (j in types) { if (types.hasOwnProperty(j)) { phases = types[j]; if (phases) { for (i in phases) { if (phases.hasOwnProperty(i)) { triggers = phases[i]; // Loop triggers and set enabled flag for (k = 0; k < triggers.length; k++) { if (triggers[k].id === id) { triggers[k].enabled = true; result = true; } } } } } } } } return result; }, 'number': function (type) { // Alter all triggers of this type var self = this, phases = self._trigger[type], triggers, result = false, i, k; if (phases) { for (i in phases) { if (phases.hasOwnProperty(i)) { triggers = phases[i]; // Loop triggers and set to enabled for (k = 0; k < triggers.length; k++) { triggers[k].enabled = true; result = true; } } } } return result; }, 'number, number': function (type, phase) { // Alter all triggers of this type and phase var self = this, phases = self._trigger[type], triggers, result = false, k; if (phases) { triggers = phases[phase]; if (triggers) { // Loop triggers and set to enabled for (k = 0; k < triggers.length; k++) { triggers[k].enabled = true; result = true; } } } return result; }, 'string, number, number': function (id, type, phase) { // Check if the trigger already exists var self = this, triggerIndex = self._triggerIndexOf(id, type, phase); if (triggerIndex > -1) { // Update the trigger self._trigger[type][phase][triggerIndex].enabled = true; return true; } return false; } }), disableTrigger: new Overload({ 'string': function (id) { // Alter all triggers of this type var self = this, types = self._trigger, phases, triggers, result = false, i, k, j; if (types) { for (j in types) { if (types.hasOwnProperty(j)) { phases = types[j]; if (phases) { for (i in phases) { if (phases.hasOwnProperty(i)) { triggers = phases[i]; // Loop triggers and set enabled flag for (k = 0; k < triggers.length; k++) { if (triggers[k].id === id) { triggers[k].enabled = false; result = true; } } } } } } } } return result; }, 'number': function (type) { // Alter all triggers of this type var self = this, phases = self._trigger[type], triggers, result = false, i, k; if (phases) { for (i in phases) { if (phases.hasOwnProperty(i)) { triggers = phases[i]; // Loop triggers and set to disabled for (k = 0; k < triggers.length; k++) { triggers[k].enabled = false; result = true; } } } } return result; }, 'number, number': function (type, phase) { // Alter all triggers of this type and phase var self = this, phases = self._trigger[type], triggers, result = false, k; if (phases) { triggers = phases[phase]; if (triggers) { // Loop triggers and set to disabled for (k = 0; k < triggers.length; k++) { triggers[k].enabled = false; result = true; } } } return result; }, 'string, number, number': function (id, type, phase) { // Check if 
the trigger already exists var self = this, triggerIndex = self._triggerIndexOf(id, type, phase); if (triggerIndex > -1) { // Update the trigger self._trigger[type][phase][triggerIndex].enabled = false; return true; } return false; } }), /** * Checks if a trigger will fire based on the type and phase provided. * @param {Number} type The type of operation. See Mixin.Constants for * constants to use. * @param {Number} phase The phase of the operation. See Mixin.Constants * for constants to use. * @returns {Boolean} True if the trigger will fire, false otherwise. */ willTrigger: function (type, phase) { if (!this._ignoreTriggers && this._trigger && this._trigger[type] && this._trigger[type][phase] && this._trigger[type][phase].length) { // Check if a trigger in this array is enabled var arr = this._trigger[type][phase], i; for (i = 0; i < arr.length; i++) { if (arr[i].enabled) { return true; } } } return false; }, /** * Processes trigger actions based on the operation, type and phase. * @param {Object} operation Operation data to pass to the trigger. * @param {Number} type The type of operation. See Mixin.Constants for * constants to use. * @param {Number} phase The phase of the operation. See Mixin.Constants * for constants to use. * @param {Object} oldDoc The document snapshot before operations are * carried out against the data. * @param {Object} newDoc The document snapshot after operations are * carried out against the data. * @returns {boolean} */ processTrigger: function (operation, type, phase, oldDoc, newDoc) { var self = this, triggerArr, triggerIndex, triggerCount, triggerItem, response, typeName, phaseName; if (!self._ignoreTriggers && self._trigger && self._trigger[type] && self._trigger[type][phase]) { triggerArr = self._trigger[type][phase]; triggerCount = triggerArr.length; for (triggerIndex = 0; triggerIndex < triggerCount; triggerIndex++) { triggerItem = triggerArr[triggerIndex]; // Check if the trigger is enabled if (triggerItem.enabled) { if (self.debug()) { switch (type) { case this.TYPE_INSERT: typeName = 'insert'; break; case this.TYPE_UPDATE: typeName = 'update'; break; case this.TYPE_REMOVE: typeName = 'remove'; break; default: typeName = ''; break; } switch (phase) { case this.PHASE_BEFORE: phaseName = 'before'; break; case this.PHASE_AFTER: phaseName = 'after'; break; default: phaseName = ''; break; } console.log('Triggers: Processing trigger "' + triggerItem.id + '" for ' + typeName + ' in phase "' + phaseName + '"'); } // Check if the trigger is already in the stack, if it is, // don't fire it again (this is so we avoid infinite loops // where a trigger triggers another trigger which calls this // one and so on) if (self.triggerStack && self.triggerStack[type] && self.triggerStack[type][phase] && self.triggerStack[type][phase][triggerItem.id]) { // The trigger is already in the stack, do not fire the trigger again if (self.debug()) { console.log('Triggers: Will not run trigger "' + triggerItem.id + '" for ' + typeName + ' in phase "' + phaseName + '" as it is already in the stack!'); } continue; } // Add the trigger to the stack so we don't go down an endless // trigger loop self.triggerStack = self.triggerStack || {}; self.triggerStack[type] = {}; self.triggerStack[type][phase] = {}; self.triggerStack[type][phase][triggerItem.id] = true; // Run the trigger's method and store the response response = triggerItem.method.call(self, operation, oldDoc, newDoc); // Remove the trigger from the stack self.triggerStack = self.triggerStack || {}; self.triggerStack[type] = 
{}; self.triggerStack[type][phase] = {}; self.triggerStack[type][phase][triggerItem.id] = false; // Check the response for a non-expected result (anything other than // [undefined, true or false] is considered a throwable error) if (response === false) { // The trigger wants us to cancel operations return false; } if (response !== undefined && response !== true && response !== false) { // Trigger responded with error, throw the error throw('ForerunnerDB.Mixin.Triggers: Trigger error: ' + response); } } } // Triggers all ran without issue, return a success (true) return true; } }, /** * Returns the index of a trigger by id based on type and phase. * @param {String} id The id of the trigger to find the index of. * @param {Number} type The type of operation. See Mixin.Constants for * constants to use. * @param {Number} phase The phase of the operation. See Mixin.Constants * for constants to use. * @returns {Number} * @private */ _triggerIndexOf: function (id, type, phase) { var self = this, triggerArr, triggerCount, triggerIndex; if (self._trigger && self._trigger[type] && self._trigger[type][phase]) { triggerArr = self._trigger[type][phase]; triggerCount = triggerArr.length; for (triggerIndex = 0; triggerIndex < triggerCount; triggerIndex++) { if (triggerArr[triggerIndex].id === id) { return triggerIndex; } } } return -1; } }; /** * When called in a before phase the newDoc object can be directly altered * to modify the data in it before the operation is carried out. * @callback Triggers.addTriggerCallback * @param {Object} operation The details about the operation. * @param {Object} oldDoc The document before the operation. * @param {Object} newDoc The document after the operation. */ module.exports = Triggers; },{"./Overload":32}],29:[function(_dereq_,module,exports){ "use strict"; /** * Provides methods to handle object update operations. * @mixin */ var Updating = { /** * Updates a property on an object. * @param {Object} doc The object whose property is to be updated. * @param {String} prop The property to update. * @param {*} val The new value of the property. * @private */ _updateProperty: function (doc, prop, val) { doc[prop] = val; if (this.debug()) { console.log(this.logIdentifier() + ' Setting non-data-bound document property "' + prop + '" to val "' + val + '"'); } }, /** * Increments a value for a property on a document by the passed number. * @param {Object} doc The document to modify. * @param {String} prop The property to modify. * @param {Number} val The amount to increment by. * @private */ _updateIncrement: function (doc, prop, val) { doc[prop] += val; }, /** * Changes the index of an item in the passed array. * @param {Array} arr The array to modify. * @param {Number} indexFrom The index to move the item from. * @param {Number} indexTo The index to move the item to. * @private */ _updateSpliceMove: function (arr, indexFrom, indexTo) { arr.splice(indexTo, 0, arr.splice(indexFrom, 1)[0]); if (this.debug()) { console.log(this.logIdentifier() + ' Moving non-data-bound document array index from "' + indexFrom + '" to "' + indexTo + '"'); } }, /** * Inserts an item into the passed array at the specified index. * @param {Array} arr The array to insert into. * @param {Number} index The index to insert at. * @param {Object} doc The document to insert. * @private */ _updateSplicePush: function (arr, index, doc) { if (arr.length > index) { arr.splice(index, 0, doc); } else { arr.push(doc); } }, /** * Removes an item from the passed array at the specified index. 
* @param {Array} arr The array to remove from. * @param {Number} index The index of the item to remove. * @param {Number} count The number of items to remove. * @private */ _updateSplicePull: function (arr, index, count) { if (!count) { count = 1; } arr.splice(index, count); }, /** * Inserts an item at the end of an array. * @param {Array} arr The array to insert the item into. * @param {Object} doc The document to insert. * @private */ _updatePush: function (arr, doc) { arr.push(doc); }, /** * Removes an item from the passed array. * @param {Array} arr The array to modify. * @param {Number} index The index of the item in the array to remove. * @private */ _updatePull: function (arr, index) { arr.splice(index, 1); }, /** * Multiplies a value for a property on a document by the passed number. * @param {Object} doc The document to modify. * @param {String} prop The property to modify. * @param {Number} val The amount to multiply by. * @private */ _updateMultiply: function (doc, prop, val) { doc[prop] *= val; }, /** * Renames a property on a document to the passed property. * @param {Object} doc The document to modify. * @param {String} prop The property to rename. * @param {Number} val The new property name. * @private */ _updateRename: function (doc, prop, val) { doc[val] = doc[prop]; delete doc[prop]; }, /** * Sets a property on a document to the passed value. * @param {Object} doc The document to modify. * @param {String} prop The property to set. * @param {*} val The new property value. * @private */ _updateOverwrite: function (doc, prop, val) { doc[prop] = val; }, /** * Deletes a property on a document. * @param {Object} doc The document to modify. * @param {String} prop The property to delete. * @private */ _updateUnset: function (doc, prop) { delete doc[prop]; }, /** * Removes all properties from an object without destroying * the object instance, thereby maintaining data-bound linking. * @param {Object} doc The parent object to modify. * @param {String} prop The name of the child object to clear. * @private */ _updateClear: function (doc, prop) { var obj = doc[prop], i; if (obj && typeof obj === 'object') { for (i in obj) { if (obj.hasOwnProperty(i)) { this._updateUnset(obj, i); } } } }, /** * Pops an item or items from the array stack. * @param {Object} doc The document to modify. * @param {Number} val If set to a positive integer, will pop the number specified * from the stack, if set to a negative integer will shift the number specified * from the stack. * @return {Boolean} * @private */ _updatePop: function (doc, val) { var updated = false, i; if (doc.length > 0) { if (val > 0) { for (i = 0; i < val; i++) { doc.pop(); } updated = true; } else if (val < 0) { for (i = 0; i > val; i--) { doc.shift(); } updated = true; } } return updated; } }; module.exports = Updating; },{}],30:[function(_dereq_,module,exports){ "use strict"; // Tell JSHint about EventSource /*global EventSource */ // Import external names locally var Shared = _dereq_('./Shared'), Core, CoreInit, Collection, NodeApiClient, Overload; /** * The NodeApiClient class. * @class * @constructor */ NodeApiClient = function () { this.init.apply(this, arguments); }; /** * The init method that can be overridden or extended. * @param {Core} core The ForerunnerDB core instance. 
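*
* A setup sketch (illustrative; assumes `fdb` is an existing Core instance, to
* which the Core init override later in this module attaches a NodeApiClient
* as `fdb.api`, and that a compatible API server is reachable at the given
* host and port):
* @example
* fdb.api.server('https://0.0.0.0', '8080');
*
* fdb.api.get('/myDb/collection/users', function (err, data) {
*     if (!err) { console.log('Received ' + data.length + ' documents'); }
* });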
*/ NodeApiClient.prototype.init = function (core) { var self = this; self._core = core; self.rootPath('/fdb'); }; Shared.addModule('NodeApiClient', NodeApiClient); Shared.mixin(NodeApiClient.prototype, 'Mixin.Common'); Shared.mixin(NodeApiClient.prototype, 'Mixin.Events'); Shared.mixin(NodeApiClient.prototype, 'Mixin.ChainReactor'); Core = Shared.modules.Core; CoreInit = Core.prototype.init; Collection = Shared.modules.Collection; Overload = Shared.overload; /** * Gets / sets the rootPath for the client to use in each API call. * @name rootPath * @method NodeApiClient.rootPath * @param {String=} val The path to set. * @returns {*} */ Shared.synthesize(NodeApiClient.prototype, 'rootPath'); /** * Set the url of the server to use for API. * @param {String} host The server host name including protocol. E.g. * "https://0.0.0.0". * @param {String} port The server port number e.g. "8080". */ NodeApiClient.prototype.server = function (host, port) { if (host !== undefined) { if (host.substr(host.length - 1, 1) === '/') { // Strip trailing / host = host.substr(0, host.length - 1); } if (port !== undefined) { this._server = host + ":" + port; } else { this._server = host; } this._host = host; this._port = port; return this; } if (port !== undefined) { return { host: this._host, port: this._port, url: this._server }; } else { return this._server; } }; NodeApiClient.prototype.http = function (method, url, data, options, callback) { var self = this, finalUrl, sessionData, bodyData, xmlHttp = new XMLHttpRequest(); method = method.toUpperCase(); xmlHttp.onreadystatechange = function () { if (xmlHttp.readyState === 4) { if (xmlHttp.status === 200) { // Tell the callback about success if (xmlHttp.responseText) { callback(false, self.jParse(xmlHttp.responseText)); } else { callback(false, {}); } } else if (xmlHttp.status === 204) { callback(false, {}); } else { // Tell the callback about the error callback(xmlHttp.status, xmlHttp.responseText); // Emit the error code self.emit('httpError', xmlHttp.status, xmlHttp.responseText); } } }; switch (method) { case 'GET': case 'DELETE': case 'HEAD': // Check for global auth if (this._sessionData) { data = data !== undefined ? data : {}; // Add the session data to the key specified data[this._sessionData.key] = this._sessionData.obj; } finalUrl = url + (data !== undefined ? '?' + self.jStringify(data) : ''); bodyData = null; break; case 'POST': case 'PUT': case 'PATCH': // Check for global auth if (this._sessionData) { sessionData = {}; // Add the session data to the key specified sessionData[this._sessionData.key] = this._sessionData.obj; } finalUrl = url + (sessionData !== undefined ? '?' + self.jStringify(sessionData) : ''); bodyData = (data !== undefined ? 
self.jStringify(data) : null); break; default: return false; } xmlHttp.open(method, finalUrl, true); xmlHttp.setRequestHeader("Content-Type", "application/json;charset=UTF-8"); xmlHttp.send(bodyData); return this; }; // Define HTTP helper methods NodeApiClient.prototype.head = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('HEAD', this.server() + this._rootPath + path, data, options, callback); } }); NodeApiClient.prototype.get = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('GET', this.server() + this._rootPath + path, data, options, callback); } }); NodeApiClient.prototype.put = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('PUT', this.server() + this._rootPath + path, data, options, callback); } }); NodeApiClient.prototype.post = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('POST', this.server() + this._rootPath + path, data, options, callback); } }); NodeApiClient.prototype.patch = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('PATCH', this.server() + this._rootPath + path, data, options, callback); } }); NodeApiClient.prototype.postPatch = function (path, id, data, options, callback) { var self = this; // Determine if the item exists or not this.head(path + '/' + id, undefined, {}, function (err, headData) { if (err) { if (err === 404) { // Item does not exist, run post return self.http('POST', self.server() + self._rootPath + path, data, options, callback); } else { callback(err, data); } } else { // Item already exists, run patch return self.http('PATCH', self.server() + self._rootPath + path + '/' + 
id, data, options, callback); } }); }; NodeApiClient.prototype.delete = new Overload({ 'string, function': function (path, callback) { return this.$main.call(this, path, undefined, {}, callback); }, 'string, *, function': function (path, data, callback) { return this.$main.call(this, path, data, {}, callback); }, 'string, *, object, function': function (path, data, options, callback) { return this.$main.call(this, path, data, options, callback); }, '$main': function (path, data, options, callback) { return this.http('DELETE', this.server() + this._rootPath + path, data, options, callback); } }); /** * Gets/ sets a global object that will be sent up with client * requests to the API or REST server. * @param {String} key The key to send the session object up inside. * @param {*} obj The object / value to send up with all requests. If * a request has its own data to send up, this session data will be * mixed in to the request data under the specified key. */ NodeApiClient.prototype.session = function (key, obj) { if (key !== undefined && obj !== undefined) { this._sessionData = { key: key, obj: obj }; return this; } return this._sessionData; }; /** * Initiates a client connection to the API server. * @param collectionInstance * @param path * @param query * @param options * @param callback */ NodeApiClient.prototype.sync = function (collectionInstance, path, query, options, callback) { var self = this, source, finalPath, queryParams, queryString = '', connecting = true; if (this.debug()) { console.log(this.logIdentifier() + ' Connecting to API server ' + this.server() + this._rootPath + path); } finalPath = this.server() + this._rootPath + path + '/_sync'; // Check for global auth if (this._sessionData) { queryParams = queryParams || {}; if (this._sessionData.key) { // Add the session data to the key specified queryParams[this._sessionData.key] = this._sessionData.obj; } else { // Add the session data to the root query object Shared.mixin(queryParams, this._sessionData.obj); } } if (query) { queryParams = queryParams || {}; queryParams.$query = query; } if (options) { queryParams = queryParams || {}; if (options.$initialData === undefined) { options.$initialData = true; } queryParams.$options = options; } if (queryParams) { queryString = this.jStringify(queryParams); finalPath += '?' + queryString; } source = new EventSource(finalPath); collectionInstance.__apiConnection = source; source.addEventListener('open', function (e) { if (!options || (options && options.$initialData)) { // The connection is open, grab the initial data self.get(path, queryParams, function (err, data) { if (!err) { collectionInstance.upsert(data); } }); } }, false); source.addEventListener('error', function (e) { if (source.readyState === 2) { // The connection is dead, remove the connection collectionInstance.unSync(); } if (connecting) { connecting = false; callback(e); } }, false); source.addEventListener('insert', function(e) { var data = self.jParse(e.data); collectionInstance.insert(data.dataSet); }, false); source.addEventListener('update', function(e) { var data = self.jParse(e.data); collectionInstance.update(data.query, data.update); }, false); source.addEventListener('remove', function(e) { var data = self.jParse(e.data); collectionInstance.remove(data.query); }, false); if (callback) { source.addEventListener('connected', function (e) { if (connecting) { connecting = false; callback(false); } }, false); } }; Collection.prototype.sync = new Overload({ /** * Sync with this collection on the server-side. 
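*
* A usage sketch (illustrative; assumes the collection is attached to a
* database whose core has this NodeApiClient module loaded and that a
* sync-capable server is running):
* @example
* var users = db.collection('users');
*
* users.sync(function (err) {
*     if (!err) { console.log('Sync channel established'); }
* });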
* @name sync * @method Collection.sync * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'function': function (callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + this.name(), null, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} collectionName The collection to sync from. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, function': function (collectionName, callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + collectionName, null, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {Object} query A query object. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'object, function': function (query, callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + this.name(), query, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} objectType The type of object to sync to e.g. * "collection" or "view". * @param {String} objectName The name of the object to sync from. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, string, function': function (objectType, objectName, callback) { this.$main.call(this, '/' + this._db.name() + '/' + objectType + '/' + objectName, null, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} collectionName The collection to sync from. * @param {Object} query A query object. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, object, function': function (collectionName, query, callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + collectionName, query, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} objectType The type of object to sync to e.g. * "collection" or "view". * @param {String} objectName The name of the object to sync from. * @param {Object} query A query object. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, string, object, function': function (objectType, objectName, query, callback) { this.$main.call(this, '/' + this._db.name() + '/' + objectType + '/' + objectName, query, null, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {Object} query A query object. * @param {Object} options An options object. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'object, object, function': function (query, options, callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + this.name(), query, options, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} collectionName The collection to sync from. * @param {Object} query A query object. * @param {Object} options An options object. 
* @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, object, object, function': function (collectionName, query, options, callback) { this.$main.call(this, '/' + this._db.name() + '/collection/' + collectionName, query, options, callback); }, /** * Sync with this collection on the server-side. * @name sync * @method Collection.sync * @param {String} objectType The type of object to sync to e.g. * "collection" or "view". * @param {String} objectName The name of the object to sync from. * @param {Object} query A query object. * @param {Object} options An options object. * @param {Function} callback The callback method to call once * the connection to the server has been established. */ 'string, string, object, object, function': function (objectType, objectName, query, options, callback) { this.$main.call(this, '/' + this._db.name() + '/' + objectType + '/' + objectName, query, options, callback); }, '$main': function (path, query, options, callback) { var self = this; if (this._db && this._db._core) { // Kill any existing sync connection this.unSync(); // Create new sync connection this._db._core.api.sync(this, path, query, options, callback); // Hook on drop to call unsync this.on('drop', function () { self.unSync(); }); } else { throw(this.logIdentifier() + ' Cannot sync for an anonymous collection! (Collection must be attached to a database)'); } } }); /** * Disconnects an existing connection to a sync server. * @returns {boolean} True if a connection existed, false * if no connection existed. */ Collection.prototype.unSync = function () { if (this.__apiConnection) { if (this.__apiConnection.readyState !== 2) { this.__apiConnection.close(); } delete this.__apiConnection; return true; } return false; }; Collection.prototype.http = new Overload({ 'string, function': function (method, callback) { this.$main.call(this, method, '/' + this._db.name() + '/collection/' + this.name(), undefined, undefined, {}, callback); }, '$main': function (method, path, queryObj, queryOptions, options, callback) { if (this._db && this._db._core) { return this._db._core.api.http('GET', this._db._core.api.server() + this._rootPath + path, {"$query": queryObj, "$options": queryOptions}, options, callback); } else { throw(this.logIdentifier() + ' Cannot do HTTP for an anonymous collection! 
(Collection must be attached to a database)'); } } }); Collection.prototype.autoHttp = new Overload({ 'string, function': function (method, callback) { this.$main.call(this, method, '/' + this._db.name() + '/collection/' + this.name(), undefined, undefined, {}, callback); }, 'string, string, function': function (method, collectionName, callback) { this.$main.call(this, method, '/' + this._db.name() + '/collection/' + collectionName, undefined, undefined, {}, callback); }, 'string, string, string, function': function (method, objType, objName, callback) { this.$main.call(this, method, '/' + this._db.name() + '/' + objType + '/' + objName, undefined, undefined, {}, callback); }, 'string, string, string, object, function': function (method, objType, objName, queryObj, callback) { this.$main.call(this, method, '/' + this._db.name() + '/' + objType + '/' + objName, queryObj, undefined, {}, callback); }, 'string, string, string, object, object, function': function (method, objType, objName, queryObj, queryOptions, callback) { this.$main.call(this, method, '/' + this._db.name() + '/' + objType + '/' + objName, queryObj, queryOptions, {}, callback); }, '$main': function (method, path, queryObj, queryOptions, options, callback) { var self = this; if (this._db && this._db._core) { return this._db._core.api.http('GET', this._db._core.api.server() + this._db._core.api._rootPath + path, {"$query": queryObj, "$options": queryOptions}, options, function (err, data) { var i; if (!err && data) { // Check the type of method we used and operate on the collection accordingly switch (method) { // Find insert case 'GET': self.insert(data); break; // Insert case 'POST': if (data.inserted && data.inserted.length) { self.insert(data.inserted); } break; // Update overwrite case 'PUT': case 'PATCH': if (data instanceof Array) { // Update each document for (i = 0; i < data.length; i++) { self.updateById(data[i]._id, {$overwrite: data[i]}); } } else { // Update single document self.updateById(data._id, {$overwrite: data}); } break; // Remove case 'DELETE': self.remove(data); break; default: // Nothing to do with this method break; } } // Send the data back to the callback callback(err, data); }); } else { throw(this.logIdentifier() + ' Cannot do HTTP for an anonymous collection! (Collection must be attached to a database)'); } } }); // Override the Core init to instantiate the plugin Core.prototype.init = function () { CoreInit.apply(this, arguments); this.api = new NodeApiClient(this); }; Shared.finishModule('NodeApiClient'); module.exports = NodeApiClient; },{"./Shared":40}],31:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), Path = _dereq_('./Path'); /** * The operation class, used to store details about an operation being * performed by the database. * @param {String} name The name of the operation. 
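*
* A timing sketch (illustrative; the section and flag names used here are
* arbitrary):
* @example
* var op = new Operation('find');
*
* op.start();
* op.time('indexScan'); // first call starts timing the section
* op.time('indexScan'); // second call stops it and records totalMs
* op.flag('usedIndex', 'true');
* op.stop();
*
* console.log(op.data('time.totalMs'));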
* @constructor
*/
var Operation = function (name) {
    this.pathSolver = new Path();
    this.counter = 0;
    this.init.apply(this, arguments);
};

Operation.prototype.init = function (name) {
    this._data = {
        operation: name, // The name of the operation executed such as "find", "update" etc
        index: {
            potential: [], // Indexes that could have potentially been used
            used: false // The index that was picked to use
        },
        steps: [], // The steps taken to generate the query results,
        time: {
            startMs: 0,
            stopMs: 0,
            totalMs: 0,
            process: {}
        },
        flag: {}, // An object with flags that denote certain execution paths
        log: [] // Any extra data that might be useful such as warnings or helpful hints
    };
};

Shared.addModule('Operation', Operation);
Shared.mixin(Operation.prototype, 'Mixin.ChainReactor');

/**
 * Starts the operation timer.
 */
Operation.prototype.start = function () {
    this._data.time.startMs = new Date().getTime();
};

/**
 * Adds an item to the operation log.
 * @param {String} event The item to log.
 * @returns {*}
 */
Operation.prototype.log = function (event) {
    if (event) {
        var lastLogTime = this._data.log.length > 0 ? this._data.log[this._data.log.length - 1].time : 0,
            logObj = {
                event: event,
                time: new Date().getTime(),
                delta: 0
            };

        this._data.log.push(logObj);

        if (lastLogTime) {
            logObj.delta = logObj.time - lastLogTime;
        }

        return this;
    }

    return this._data.log;
};

/**
 * Called when starting and ending a timed operation, used to time
 * internal calls within an operation's execution.
 * @param {String} section An operation name.
 * @returns {*}
 */
Operation.prototype.time = function (section) {
    if (section !== undefined) {
        var process = this._data.time.process,
            processObj = process[section] = process[section] || {};

        if (!processObj.startMs) {
            // Timer started
            processObj.startMs = new Date().getTime();
            processObj.stepObj = {
                name: section
            };

            this._data.steps.push(processObj.stepObj);
        } else {
            processObj.stopMs = new Date().getTime();
            processObj.totalMs = processObj.stopMs - processObj.startMs;
            processObj.stepObj.totalMs = processObj.totalMs;
            delete processObj.stepObj;
        }

        return this;
    }

    return this._data.time;
};

/**
 * Used to set key/value flags during operation execution.
 * @param {String} key
 * @param {String} val
 * @returns {*}
 */
Operation.prototype.flag = function (key, val) {
    if (key !== undefined && val !== undefined) {
        this._data.flag[key] = val;
    } else if (key !== undefined) {
        return this._data.flag[key];
    } else {
        return this._data.flag;
    }
};

Operation.prototype.data = function (path, val, noTime) {
    if (val !== undefined) {
        // Assign value to object path
        this.pathSolver.set(this._data, path, val);

        return this;
    }

    return this.pathSolver.get(this._data, path);
};

Operation.prototype.pushData = function (path, val, noTime) {
    // Assign value to object path
    this.pathSolver.push(this._data, path, val);
};

/**
 * Stops the operation timer.
 */
Operation.prototype.stop = function () {
    this._data.time.stopMs = new Date().getTime();
    this._data.time.totalMs = this._data.time.stopMs - this._data.time.startMs;
};

Shared.finishModule('Operation');
module.exports = Operation;
},{"./Path":34,"./Shared":40}],32:[function(_dereq_,module,exports){
"use strict";

/**
 * Allows a method to accept overloaded calls with different parameters controlling
 * which passed overload function is called.
 * @param {String=} name A name to provide this overload to help identify
 * it if any errors occur during the resolving phase of the overload. This
 * is purely for debug purposes and serves no functional purpose.
* @param {Object} def The overload definition. * @returns {Function} * @constructor */ var Overload = function (name, def) { if (!def) { def = name; name = undefined; } if (def) { var self = this, index, count, tmpDef, defNewKey, sigIndex, signatures; if (!(def instanceof Array)) { tmpDef = {}; // Def is an object, make sure all prop names are devoid of spaces for (index in def) { if (def.hasOwnProperty(index)) { defNewKey = index.replace(/ /g, ''); // Check if the definition array has a * string in it if (defNewKey.indexOf('*') === -1) { // No * found tmpDef[defNewKey] = def[index]; } else { // A * was found, generate the different signatures that this // definition could represent signatures = this.generateSignaturePermutations(defNewKey); for (sigIndex = 0; sigIndex < signatures.length; sigIndex++) { if (!tmpDef[signatures[sigIndex]]) { tmpDef[signatures[sigIndex]] = def[index]; } } } } } def = tmpDef; } return function () { var arr = [], lookup, type, overloadName; // Check if we are being passed a key/function object or an array of functions if (def instanceof Array) { // We were passed an array of functions count = def.length; for (index = 0; index < count; index++) { if (def[index].length === arguments.length) { return self.callExtend(this, '$main', def, def[index], arguments); } } } else { // Generate lookup key from arguments // Copy arguments to an array for (index = 0; index < arguments.length; index++) { type = typeof arguments[index]; // Handle detecting arrays if (type === 'object' && arguments[index] instanceof Array) { type = 'array'; } // Handle been presented with a single undefined argument if (arguments.length === 1 && type === 'undefined') { break; } // Add the type to the argument types array arr.push(type); } lookup = arr.join(','); // Check for an exact lookup match if (def[lookup]) { return self.callExtend(this, '$main', def, def[lookup], arguments); } else { for (index = arr.length; index >= 0; index--) { // Get the closest match lookup = arr.slice(0, index).join(','); if (def[lookup + ',...']) { // Matched against arguments + "any other" return self.callExtend(this, '$main', def, def[lookup + ',...'], arguments); } } } } overloadName = name !== undefined ? name : typeof this.name === 'function' ? this.name() : 'Unknown'; console.log('Overload Definition:', def); throw('ForerunnerDB.Overload "' + overloadName + '": Overloaded method does not have a matching signature "' + lookup + '" for the passed arguments: ' + this.jStringify(arr)); }; } return function () {}; }; /** * Generates an array of all the different definition signatures that can be * created from the passed string with a catch-all wildcard *. E.g. it will * convert the signature: string,*,string to all potentials: * string,string,string * string,number,string * string,object,string, * string,function,string, * string,undefined,string * * @param {String} str Signature string with a wildcard in it. * @returns {Array} An array of signature strings that are generated. 
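*
* An illustrative sketch of how a wildcard definition key behaves (the
* expansion happens internally via this method):
* @example
* // The wildcard key 'string,*' is expanded into 'string,array',
* // 'string,string', 'string,object', 'string,number', 'string,function'
* // and 'string,undefined'.
* var greet = new Overload({
*     'string,*': function (name, anything) { return 'Hello ' + name; }
* });
*
* greet('Bob', 123); // Matches the generated 'string,number' signature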
*/ Overload.prototype.generateSignaturePermutations = function (str) { var signatures = [], newSignature, types = ['array', 'string', 'object', 'number', 'function', 'undefined'], index; if (str.indexOf('*') > -1) { // There is at least one "any" type, break out into multiple keys // We could do this at query time with regular expressions but // would be significantly slower for (index = 0; index < types.length; index++) { newSignature = str.replace('*', types[index]); signatures = signatures.concat(this.generateSignaturePermutations(newSignature)); } } else { signatures.push(str); } return signatures; }; Overload.prototype.callExtend = function (context, prop, propContext, func, args) { var tmp, ret; if (context && propContext[prop]) { tmp = context[prop]; context[prop] = propContext[prop]; ret = func.apply(context, args); context[prop] = tmp; return ret; } else { return func.apply(context, args); } }; module.exports = Overload; },{}],33:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared, Db, Collection, DbDocument; Shared = _dereq_('./Shared'); /** * The Overview class provides a queryable interface for data aggregation in realtime. * @class */ var Overview = function () { this.init.apply(this, arguments); }; /** * Initialises the overview, called by the class as it is instantiated. * @constructor */ Overview.prototype.init = function (name) { var self = this; this._name = name; this._data = new DbDocument('__FDB__dc_data_' + this._name); this._collData = new Collection(); this._sources = []; this._sourceDroppedWrap = function () { self._sourceDropped.apply(self, arguments); }; }; Shared.addModule('Overview', Overview); Shared.mixin(Overview.prototype, 'Mixin.Common'); Shared.mixin(Overview.prototype, 'Mixin.ChainReactor'); Shared.mixin(Overview.prototype, 'Mixin.Constants'); Shared.mixin(Overview.prototype, 'Mixin.Triggers'); Shared.mixin(Overview.prototype, 'Mixin.Events'); Shared.mixin(Overview.prototype, 'Mixin.Tags'); Collection = _dereq_('./Collection'); DbDocument = _dereq_('./Document'); Db = Shared.modules.Db; /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'state'); /** * Gets / sets the db. * @param {Db=} val The db to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'db'); /** * Gets / sets the name. * @param {String=} val The name to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'name'); /** * Gets / sets the query. * @param {Object=} val The query to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'query', function (val) { var ret = this.$super(val); if (val !== undefined) { this._refresh(); } return ret; }); /** * Gets / sets the query options. * @param {Object=} val The query options to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'queryOptions', function (val) { var ret = this.$super(val); if (val !== undefined) { this._refresh(); } return ret; }); /** * Gets / sets the reduce object. * @param {Object=} val The reduce object to set. * @returns {*} */ Shared.synthesize(Overview.prototype, 'reduce', function (val) { var ret = this.$super(val); if (val !== undefined) { this._refresh(); } return ret; }); /** * Gets / sets the data source the overview uses for underlying data. * @param {Collection|View=} source The data source to set. 
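*
* A usage sketch (illustrative; assumes a `db` instance and a "transactions"
* collection with numeric `amount` fields already exist):
* @example
* var totals = db.overview('transactionTotals');
*
* totals.from('transactions');
* totals.reduce(function () {
*     var sum = 0;
*     this.find().forEach(function (doc) { sum += doc.amount; });
*     return { total: sum };
* });
*
* console.log(totals.exec()); // e.g. { total: 42 }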
* @returns {*}
*/
Overview.prototype.from = function (source) {
    if (source !== undefined) {
        if (typeof(source) === 'string') {
            source = this._db.collection(source);
        }

        this._setFrom(source);
        return this;
    }

    return this._sources;
};

/**
 * Returns the data in the overview.
 * @param {Object=} query The query object.
 * @param {Object=} options An options object.
 * @param {Function=} callback A callback method.
 * @returns {*}
 * @see Collection.find()
 */
Overview.prototype.find = function (query, options, callback) {
    return this._collData.find.apply(this._collData, arguments);
};

/**
 * Executes and returns the response from the current reduce method
 * assigned to the overview.
 * @returns {*}
 */
Overview.prototype.exec = function () {
    var reduceFunc = this.reduce();

    return reduceFunc ? reduceFunc.apply(this) : undefined;
};

/**
 * Returns a count of the documents in the overview data set.
 * @returns {*}
 */
Overview.prototype.count = function () {
    return this._collData.count.apply(this._collData, arguments);
};

Overview.prototype._setFrom = function (source) {
    // Remove all source references
    while (this._sources.length) {
        this._removeSource(this._sources[0]);
    }

    this._addSource(source);

    return this;
};

Overview.prototype._addSource = function (source) {
    if (source && source.className === 'View') {
        // The source is a view so IO to the internal data collection
        // instead of the view proper
        source = source.data();

        if (this.debug()) {
            console.log(this.logIdentifier() + ' Using internal private data "' + source.instanceIdentifier() + '" for IO graph linking');
        }
    }

    if (this._sources.indexOf(source) === -1) {
        this._sources.push(source);
        source.chain(this);
        source.on('drop', this._sourceDroppedWrap);
        this._refresh();
    }

    return this;
};

Overview.prototype._removeSource = function (source) {
    if (source && source.className === 'View') {
        // The source is a view so IO to the internal data collection
        // instead of the view proper
        source = source.data();

        if (this.debug()) {
            console.log(this.logIdentifier() + ' Using internal private data "' + source.instanceIdentifier() + '" for IO graph linking');
        }
    }

    var sourceIndex = this._sources.indexOf(source);

    if (sourceIndex > -1) {
        this._sources.splice(sourceIndex, 1);
        source.unChain(this);
        source.off('drop', this._sourceDroppedWrap);
        this._refresh();
    }

    return this;
};

Overview.prototype._sourceDropped = function (source) {
    if (source) {
        // Source was dropped, remove from overview
        this._removeSource(source);
    }
};

Overview.prototype._refresh = function () {
    if (!this.isDropped()) {
        if (this._sources && this._sources[0]) {
            this._collData.primaryKey(this._sources[0].primaryKey());
            var tempArr = [],
                i;

            for (i = 0; i < this._sources.length; i++) {
                tempArr = tempArr.concat(this._sources[i].find(this._query, this._queryOptions));
            }

            this._collData.setData(tempArr);
        }

        // Now execute the reduce method
        if (this._reduce) {
            var reducedData = this._reduce.apply(this);

            // Update the document with the newly returned data
            this._data.setData(reducedData);
        }
    }
};

Overview.prototype._chainHandler = function (chainPacket) {
    switch (chainPacket.type) {
        case 'setData':
        case 'insert':
        case 'update':
        case 'remove':
            this._refresh();
            break;

        default:
            break;
    }
};

/**
 * Gets the module's internal data collection.
 * @returns {Collection}
 */
Overview.prototype.data = function () {
    return this._data;
};

/**
 * Drops the overview.
 * @param {Function=} callback A callback function.
* @returns {Boolean} */ Overview.prototype.drop = function (callback) { if (!this.isDropped()) { this._state = 'dropped'; delete this._data; delete this._collData; // Remove all source references while (this._sources.length) { this._removeSource(this._sources[0]); } delete this._sources; if (this._db && this._name) { delete this._db._overview[this._name]; } this.emit('drop', this); if (callback) { callback(false, true); } delete this._listeners; } return true; }; /** * Create an overview instance from a Db instance. * @param {String} name The name of the overview. * @returns {*} */ Db.prototype.overview = function (name) { var self = this; if (name) { // Handle being passed an instance if (name instanceof Overview) { return name; } if (this._overview && this._overview[name]) { return this._overview[name]; } this._overview = this._overview || {}; this._overview[name] = new Overview(name).db(this); self.deferEmit('create', self._overview[name], 'overview', name); return this._overview[name]; } else { // Return an object of collection data return this._overview || {}; } }; /** * Returns an array of overviews the DB currently has. * @returns {Array} An array of objects containing details of each overview * the database is currently managing. */ Db.prototype.overviews = function () { var arr = [], item, i; for (i in this._overview) { if (this._overview.hasOwnProperty(i)) { item = this._overview[i]; arr.push({ name: i, count: item.count(), linked: item.isLinked !== undefined ? item.isLinked() : false }); } } return arr; }; Shared.finishModule('Overview'); module.exports = Overview; },{"./Collection":6,"./Document":11,"./Shared":40}],34:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'); /** * Path object used to resolve object paths and retrieve data from * objects by using paths. * @param {String=} path The path to assign. * @constructor */ var Path = function (path) { this.init.apply(this, arguments); }; Path.prototype.init = function (path) { if (path) { this.path(path); } }; Shared.addModule('Path', Path); Shared.mixin(Path.prototype, 'Mixin.Common'); Shared.mixin(Path.prototype, 'Mixin.ChainReactor'); /** * Gets / sets the given path for the Path instance. * @param {String=} path The path to assign. */ Path.prototype.path = function (path) { if (path !== undefined) { this._path = this.clean(path); this._pathParts = this._path.split('.'); return this; } return this._path; }; /** * Tests if the passed object has the paths that are specified and that * a value exists in those paths. * @param {Object} testKeys The object describing the paths to test for. * @param {Object} testObj The object to test paths against. * @returns {Boolean} True if the object paths exist. */ Path.prototype.hasObjectPaths = function (testKeys, testObj) { var result = true, i; for (i in testKeys) { if (testKeys.hasOwnProperty(i)) { if (testObj[i] === undefined) { return false; } if (typeof testKeys[i] === 'object') { // Recurse object result = this.hasObjectPaths(testKeys[i], testObj[i]); // Should we exit early? if (!result) { return false; } } } } return result; }; /** * Counts the total number of key endpoints in the passed object. * @param {Object} testObj The object to count key endpoints for. * @returns {Number} The number of endpoints. 
*/ Path.prototype.countKeys = function (testObj) { var totalKeys = 0, i; for (i in testObj) { if (testObj.hasOwnProperty(i)) { if (testObj[i] !== undefined) { if (typeof testObj[i] !== 'object') { totalKeys++; } else { totalKeys += this.countKeys(testObj[i]); } } } } return totalKeys; }; /** * Tests if the passed object has the paths that are specified and that * a value exists in those paths and if so returns the number matched. * @param {Object} testKeys The object describing the paths to test for. * @param {Object} testObj The object to test paths against. * @returns {Object} Stats on the matched keys */ Path.prototype.countObjectPaths = function (testKeys, testObj) { var matchData, matchedKeys = {}, matchedKeyCount = 0, totalKeyCount = 0, i; for (i in testObj) { if (testObj.hasOwnProperty(i)) { if (typeof testObj[i] === 'object') { // The test / query object key is an object, recurse matchData = this.countObjectPaths(testKeys[i], testObj[i]); matchedKeys[i] = matchData.matchedKeys; totalKeyCount += matchData.totalKeyCount; matchedKeyCount += matchData.matchedKeyCount; } else { // The test / query object has a property that is not an object so add it as a key totalKeyCount++; // Check if the test keys also have this key and it is also not an object if (testKeys && testKeys[i] && typeof testKeys[i] !== 'object') { matchedKeys[i] = true; matchedKeyCount++; } else { matchedKeys[i] = false; } } } } return { matchedKeys: matchedKeys, matchedKeyCount: matchedKeyCount, totalKeyCount: totalKeyCount }; }; /** * Takes a non-recursive object and converts the object hierarchy into * a path string. * @param {Object} obj The object to parse. * @param {Boolean=} withValue If true will include a 'value' key in the returned * object that represents the value the object path points to. * @returns {Object} */ Path.prototype.parse = function (obj, withValue) { var paths = [], path = '', resultData, i, k; for (i in obj) { if (obj.hasOwnProperty(i)) { // Set the path to the key path = i; if (typeof(obj[i]) === 'object') { if (withValue) { resultData = this.parse(obj[i], withValue); for (k = 0; k < resultData.length; k++) { paths.push({ path: path + '.' + resultData[k].path, value: resultData[k].value }); } } else { resultData = this.parse(obj[i]); for (k = 0; k < resultData.length; k++) { paths.push({ path: path + '.' + resultData[k].path }); } } } else { if (withValue) { paths.push({ path: path, value: obj[i] }); } else { paths.push({ path: path }); } } } } return paths; }; /** * Takes a non-recursive object and converts the object hierarchy into * an array of path strings that allow you to target all possible paths * in an object. * * The options object accepts an "ignore" field with a regular expression * as the value. If any key matches the expression it is not included in * the results. * * The options object accepts a boolean "verbose" field. If set to true * the results will include all paths leading up to endpoints as well as * they endpoints themselves. * * @returns {Array} */ Path.prototype.parseArr = function (obj, options) { options = options || {}; return this._parseArr(obj, '', [], options); }; Path.prototype._parseArr = function (obj, path, paths, options) { var i, newPath = ''; path = path || ''; paths = paths || []; for (i in obj) { if (obj.hasOwnProperty(i)) { if (!options.ignore || (options.ignore && !options.ignore.test(i))) { if (path) { newPath = path + '.' 
+ i; } else { newPath = i; } if (typeof(obj[i]) === 'object') { if (options.verbose) { paths.push(newPath); } this._parseArr(obj[i], newPath, paths, options); } else { paths.push(newPath); } } } } return paths; }; /** * Sets a value on an object for the specified path. * @param {Object} obj The object to update. * @param {String} path The path to update. * @param {*} val The value to set the object path to. * @returns {*} */ Path.prototype.set = function (obj, path, val) { if (obj !== undefined && path !== undefined) { var pathParts, part; path = this.clean(path); pathParts = path.split('.'); part = pathParts.shift(); if (pathParts.length) { // Generate the path part in the object if it does not already exist obj[part] = obj[part] || {}; // Recurse this.set(obj[part], pathParts.join('.'), val); } else { // Set the value obj[part] = val; } } return obj; }; /** * Retrieves all the values inside an object based on the passed * path string. Will automatically traverse any arrays it encounters * and assumes array indexes are not part of the specifed path. * @param {Object|Array} obj An object or array of objects to * scan paths for. * @param {String} path The path string delimited by a period. * @return {Array} An array of values found at the end of each path * termination. */ Path.prototype.aggregate = function (obj, path) { var pathParts, part, values = [], i; // First, check if the object we are given // is an array. If so, loop it and work on // the objects inside if (obj instanceof Array) { // Loop array and get path data from each sub object for (i = 0; i < obj.length; i++) { values = values.concat(this.aggregate(obj[i], path)); } return values; } if (path.indexOf('.') === -1) { // No further parts to navigate // Return an array so the value can be concatenated on return via array.concat() return [obj[path]]; } pathParts = path.split('.'); // Grab the next part of our path part = pathParts.shift(); values = values.concat(this.aggregate(obj[part], pathParts.join('.'))); return values; }; /** * Gets a single value from the passed object and given path. * @param {Object} obj The object to inspect. * @param {String} path The path to retrieve data from. * @returns {*} */ Path.prototype.get = function (obj, path) { return this.value(obj, path)[0]; }; /** * Gets the value(s) that the object contains for the currently assigned path string. * @param {Object} obj The object to evaluate the path against. * @param {String=} path A path to use instead of the existing one passed in path(). * @param {Object=} options An optional options object. * @returns {Array} An array of values for the given path. 
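* @example
* // Usage sketch; the object shapes are placeholders.
* var path = new Path();
* path.value({user: {name: 'Jo'}}, 'user.name'); // => ['Jo']
* path.get({user: {name: 'Jo'}}, 'user.name');   // => 'Jo'
* // Arrays encountered along a path are traversed automatically:
* path.aggregate([{n: 1}, {n: 2}], 'n');          // => [1, 2]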
*/ Path.prototype.value = function (obj, path, options) { var pathParts, arr, arrCount, objPart, objPartParent, valuesArr, returnArr, i, k; // Detect early exit if (path && path.indexOf('.') === -1) { if (options && options.skipArrCheck) { return [obj[path]]; } if (!(obj instanceof Array)) { return [obj[path]]; } } if (obj !== undefined && typeof obj === 'object') { if (!options || options && !options.skipArrCheck) { // Check if we were passed an array of objects and if so, // iterate over the array and return the value from each // array item if (obj instanceof Array) { returnArr = []; for (i = 0; i < obj.length; i++) { returnArr.push(this.get(obj[i], path)); } return returnArr; } } valuesArr = []; if (path !== undefined) { path = this.clean(path); pathParts = path.split('.'); } arr = pathParts || this._pathParts; arrCount = arr.length; objPart = obj; for (i = 0; i < arrCount; i++) { objPart = objPart[arr[i]]; if (objPartParent instanceof Array) { // Search inside the array for the next key for (k = 0; k < objPartParent.length; k++) { valuesArr = valuesArr.concat(this.value(objPartParent, k + '.' + arr[i], {skipArrCheck: true})); } return valuesArr; } else { if (!objPart || typeof(objPart) !== 'object') { break; } } objPartParent = objPart; } return [objPart]; } else { return []; } }; /** * Push a value to an array on an object for the specified path. * @param {Object} obj The object to update. * @param {String} path The path to the array to push to. * @param {*} val The value to push to the array at the object path. * @returns {*} */ Path.prototype.push = function (obj, path, val) { if (obj !== undefined && path !== undefined) { var pathParts, part; path = this.clean(path); pathParts = path.split('.'); part = pathParts.shift(); if (pathParts.length) { // Generate the path part in the object if it does not already exist obj[part] = obj[part] || {}; // Recurse this.set(obj[part], pathParts.join('.'), val); } else { // Set the value obj[part] = obj[part] || []; if (obj[part] instanceof Array) { obj[part].push(val); } else { throw('ForerunnerDB.Path: Cannot push to a path whose endpoint is not an array!'); } } } return obj; }; /** * Gets the value(s) that the object contains for the currently assigned path string * with their associated keys. * @param {Object} obj The object to evaluate the path against. * @param {String=} path A path to use instead of the existing one passed in path(). * @returns {Array} An array of values for the given path with the associated key. */ Path.prototype.keyValue = function (obj, path) { var pathParts, arr, arrCount, objPart, objPartParent, objPartHash, i; if (path !== undefined) { path = this.clean(path); pathParts = path.split('.'); } arr = pathParts || this._pathParts; arrCount = arr.length; objPart = obj; for (i = 0; i < arrCount; i++) { objPart = objPart[arr[i]]; if (!objPart || typeof(objPart) !== 'object') { objPartHash = arr[i] + ':' + objPart; break; } objPartParent = objPart; } return objPartHash; }; /** * Sets a value on an object for the specified path. * @param {Object} obj The object to update. * @param {String} path The path to update. * @param {*} val The value to set the object path to. 
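* @example
* // Usage sketch; the keys are placeholders.
* var path = new Path(),
*     obj = {};
* path.set(obj, 'stats.views', 1); // obj is now {stats: {views: 1}}
* path.push(obj, 'tags', 'a');     // obj.tags is now ['a']
* // Note: as written above, push() recurses via set() for multi-segment
* // paths, so only the final single-segment form appends to an array.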
* @returns {*} */ Path.prototype.set = function (obj, path, val) { if (obj !== undefined && path !== undefined) { var pathParts, part; path = this.clean(path); pathParts = path.split('.'); part = pathParts.shift(); if (pathParts.length) { // Generate the path part in the object if it does not already exist obj[part] = obj[part] || {}; // Recurse this.set(obj[part], pathParts.join('.'), val); } else { // Set the value obj[part] = val; } } return obj; }; /** * Removes leading period (.) from string and returns it. * @param {String} str The string to clean. * @returns {*} */ Path.prototype.clean = function (str) { if (str.substr(0, 1) === '.') { str = str.substr(1, str.length -1); } return str; }; Shared.finishModule('Path'); module.exports = Path; },{"./Shared":40}],35:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared = _dereq_('./Shared'), async = _dereq_('async'), localforage = _dereq_('localforage'), FdbCompress = _dereq_('./PersistCompress'),// jshint ignore:line FdbCrypto = _dereq_('./PersistCrypto'),// jshint ignore:line Db, Collection, CollectionDrop, CollectionGroup, CollectionInit, DbInit, DbDrop, Persist, Overload;//, //DataVersion = '2.0'; /** * The persistent storage class handles loading and saving data to browser * storage. * @class * @constructor */ Persist = function () { this.init.apply(this, arguments); }; /** * The local forage library. */ Persist.prototype.localforage = localforage; /** * The init method that can be overridden or extended. * @param {Db} db The ForerunnerDB database instance. */ Persist.prototype.init = function (db) { var self = this; this._encodeSteps = [ function () { return self._encode.apply(self, arguments); } ]; this._decodeSteps = [ function () { return self._decode.apply(self, arguments); } ]; // Check environment if (db.isClient()) { this.mode('localforage'); localforage.config({ name: String(db.core().name()), storeName: 'FDB' }); } }; Shared.addModule('Persist', Persist); Shared.mixin(Persist.prototype, 'Mixin.ChainReactor'); Shared.mixin(Persist.prototype, 'Mixin.Common'); Db = Shared.modules.Db; Collection = _dereq_('./Collection'); CollectionDrop = Collection.prototype.drop; CollectionGroup = _dereq_('./CollectionGroup'); CollectionInit = Collection.prototype.init; DbInit = Db.prototype.init; DbDrop = Db.prototype.drop; Overload = Shared.overload; /** * Gets / sets the auto flag which determines if the persistence module * will automatically load data for collections the first time they are * accessed and save data whenever it changes. This is disabled by * default. 
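* @example
* // Usage sketch; "db" is assumed to be a Db instance with the Persist
* // module loaded (which exposes it as db.persist).
* db.persist.auto(true);  // auto-load on create, auto-save on change
* db.persist.auto();      // => true (getter form)
* db.persist.auto(false); // un-hook the create/change handlers again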
* @param {Boolean} val Set to true to enable, false to disable * @returns {*} */ Shared.synthesize(Persist.prototype, 'auto', function (val) { var self = this; if (val !== undefined) { if (val) { // Hook db events this._db.on('create', function () { self._autoLoad.apply(self, arguments); }); this._db.on('change', function () { self._autoSave.apply(self, arguments); }); if (this._db.debug()) { console.log(this._db.logIdentifier() + ' Automatic load/save enabled'); } } else { // Un-hook db events this._db.off('create', this._autoLoad); this._db.off('change', this._autoSave); if (this._db.debug()) { console.log(this._db.logIdentifier() + ' Automatic load/save disabled'); } } } return this.$super.call(this, val); }); Persist.prototype._autoLoad = function (obj, objType, name) { var self = this; if (typeof obj.load === 'function') { if (self._db.debug()) { console.log(self._db.logIdentifier() + ' Auto-loading data for ' + objType + ':', name); } obj.load(function (err, data) { if (err && self._db.debug()) { console.log(self._db.logIdentifier() + ' Automatic load failed:', err); } self.emit('load', err, data); }); } else { if (self._db.debug()) { console.log(self._db.logIdentifier() + ' Auto-load for ' + objType + ':', name, 'no load method, skipping'); } } }; Persist.prototype._autoSave = function (obj, objType, name) { var self = this; if (typeof obj.save === 'function') { if (self._db.debug()) { console.log(self._db.logIdentifier() + ' Auto-saving data for ' + objType + ':', name); } obj.save(function (err, data) { if (err && self._db.debug()) { console.log(self._db.logIdentifier() + ' Automatic save failed:', err); } self.emit('save', err, data); }); } }; /** * Gets / sets the persistent storage mode (the library used * to persist data to the browser - defaults to localForage). * @param {String} type The library to use for storage. Defaults * to localForage. * @returns {*} */ Persist.prototype.mode = function (type) { if (type !== undefined) { this._mode = type; return this; } return this._mode; }; /** * Sets - if Persist is load in "localforage" mode - a custom driver which applies to the rules * given by localforage, given here: https://localforage.github.io/localForage/#driver-api-definedriver * @param driver A local forage driver definition * @param callback A possible callback, which is executed after the customdriver was initialized */ Persist.prototype.customdriver = function(driver, callback) { if (this.mode() !== 'localforage') { throw 'No interface ready to set custom driver!'; } var driverName = driver._driver; // copying the drivername to avoid possible variable modification errors localforage.defineDriver(driver).then(function() { return localforage.setDriver(driverName); }) .then(function() { if (callback) { callback(null); } }) ['catch'](function(err) { callback(err); }); }; /** * Gets / sets the driver used when persisting data. * @param {String} val Specify the driver type (LOCALSTORAGE, * WEBSQL or INDEXEDDB) * @returns {*} */ Persist.prototype.driver = function (val) { if (val !== undefined) { switch (val.toUpperCase()) { case 'LOCALSTORAGE': localforage.setDriver(localforage.LOCALSTORAGE); break; case 'WEBSQL': localforage.setDriver(localforage.WEBSQL); break; case 'INDEXEDDB': localforage.setDriver(localforage.INDEXEDDB); break; default: throw('ForerunnerDB.Persist: The persistence driver you have specified is not found. Please use either IndexedDB, WebSQL or LocalStorage!'); } return this; } return localforage.driver(); }; /** * Starts a decode waterfall process. 
* @param {*} val The data to be decoded. * @param {Function} finished The callback to pass final data to. */ Persist.prototype.decode = function (val, finished) { async.waterfall([function (callback) { if (callback) { callback(false, val, {}); } }].concat(this._decodeSteps), finished); }; /** * Starts an encode waterfall process. * @param {*} val The data to be encoded. * @param {Function} finished The callback to pass final data to. */ Persist.prototype.encode = function (val, finished) { async.waterfall([function (callback) { if (callback) { callback(false, val, {}); } }].concat(this._encodeSteps), finished); }; Shared.synthesize(Persist.prototype, 'encodeSteps'); Shared.synthesize(Persist.prototype, 'decodeSteps'); Persist.prototype.addStep = new Overload({ /** * Adds an encode/decode step to the persistent storage system so * that you can add custom functionality. * @name addStep * @method Persist.addStep * @param {Function} obj The object to encode / decode. */ 'object': function (obj) { return this.$main.call(this, function objEncode () { obj.encode.apply(obj, arguments); }, function objDecode () { obj.decode.apply(obj, arguments); }, 0); }, /** * Adds an encode/decode step to the persistent storage system so * that you can add custom functionality. * @name addStep * @method Persist.addStep * @param {Function} encode The encode method called with the data from the * previous encode step. When your method is complete it MUST call the * callback method. If you provide anything other than false to the err * parameter the encoder will fail and throw an error. * @param {Function} decode The decode method called with the data from the * previous decode step. When your method is complete it MUST call the * callback method. If you provide anything other than false to the err * parameter the decoder will fail and throw an error. */ 'function, function': function (encode, decode) { return this.$main.call(this, encode, decode, 0); }, /** * Adds an encode/decode step to the persistent storage system so * that you can add custom functionality. * @name addStep * @method Persist.addStep * @param {Function} encode The encode method called with the data from the * previous encode step. When your method is complete it MUST call the * callback method. If you provide anything other than false to the err * parameter the encoder will fail and throw an error. * @param {Function} decode The decode method called with the data from the * previous decode step. When your method is complete it MUST call the * callback method. If you provide anything other than false to the err * parameter the decoder will fail and throw an error. * @param {Number=} index Optional index to add the encoder step to. This * allows you to place a step before or after other existing steps. If not * provided your step is placed last in the list of steps. For instance if * you are providing an encryption step it makes sense to place this last * since all previous steps will then have their data encrypted by your * final step. 
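* @example
* // Sketch of a custom step; the rot13() helper is a placeholder. Each
* // step receives (data, meta, callback) from the waterfall and must call
* // callback(err, data, meta), passing false as err on success.
* db.persist.addStep(function encode (data, meta, callback) {
*     callback(false, rot13(data), meta);
* }, function decode (data, meta, callback) {
*     callback(false, rot13(data), meta);
* }, 0); // index 0 (or the two-argument form) appends the step last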
*/ 'function, function, number': function (encode, decode, index) { return this.$main.call(this, encode, decode, index); }, $main: function (encode, decode, index) { if (index === 0 || index === undefined) { this._encodeSteps.push(encode); this._decodeSteps.unshift(decode); } else { // Place encoder step at index then work out correct // index to place decoder step this._encodeSteps.splice(index, 0, encode); this._decodeSteps.splice(this._decodeSteps.length - index, 0, decode); } } }); Persist.prototype.unwrap = function (dataStr) { var parts = dataStr.split('::fdb::'), data; switch (parts[0]) { case 'json': data = this.jParse(parts[1]); break; case 'raw': data = parts[1]; break; default: break; } }; /** * Takes encoded data and decodes it for use as JS native objects and arrays. * @param {String} val The currently encoded string data. * @param {Object} meta Meta data object that can be used to pass back useful * supplementary data. * @param {Function} finished The callback method to call when decoding is * completed. * @private */ Persist.prototype._decode = function (val, meta, finished) { var parts, data; if (val) { parts = val.split('::fdb::'); switch (parts[0]) { case 'json': data = this.jParse(parts[1]); break; case 'raw': data = parts[1]; break; default: break; } if (data) { meta.foundData = true; meta.rowCount = data.length || 0; } else { meta.foundData = false; meta.rowCount = 0; } if (finished) { finished(false, data, meta); } } else { meta.foundData = false; meta.rowCount = 0; if (finished) { finished(false, val, meta); } } }; /** * Takes native JS data and encodes it for for storage as a string. * @param {Object} val The current un-encoded data. * @param {Object} meta Meta data object that can be used to pass back useful * supplementary data. * @param {Function} finished The callback method to call when encoding is * completed. * @private */ Persist.prototype._encode = function (val, meta, finished) { var data = val; if (typeof val === 'object') { val = 'json::fdb::' + this.jStringify(val); } else { val = 'raw::fdb::' + val; } if (data) { meta.foundData = true; meta.rowCount = data.length || 0; } else { meta.foundData = false; meta.rowCount = 0; } if (finished) { finished(false, val, meta); } }; /** * Encodes passed data and then stores it in the browser's persistent * storage layer. * @param {String} key The key to store the data under in the persistent * storage. * @param {Object} data The data to store under the key. * @param {Function=} callback The method to call when the save process * has completed. */ Persist.prototype.save = function (key, data, callback) { switch (this.mode()) { case 'localforage': this.encode(data, function (err, data, tableStats) { if (err) { return callback(err); } localforage.setItem(key, data, function (err) { if (callback) { if (err) { callback(err); return; } callback(false, data, tableStats); } }); }); break; default: if (callback) { callback('No data handler.'); } break; } }; /** * Loads and decodes data from the passed key. * @param {String} key The key to retrieve data from in the persistent * storage. * @param {Function=} callback The method to call when the load process * has completed. 
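* @example
* // Usage sketch; the key and data are placeholders.
* db.persist.save('myDb-settings', {theme: 'dark'}, function (err, data, stats) {
*     // "data" here is the encoded string actually written to storage
*     if (!err) {
*         db.persist.load('myDb-settings', function (err, data, meta) {
*             // "data" is the decoded object, "meta" holds foundData / rowCount
*         });
*     }
* });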
*/ Persist.prototype.load = function (key, callback) { var self = this; switch (this.mode()) { case 'localforage': localforage.getItem(key, function (err, val) { if (err) { if (callback) { callback(err); } return; } self.decode(val, callback); }); break; default: if (callback) { callback('No data handler or unrecognised data type.'); } break; } }; /** * Deletes data in persistent storage stored under the passed key. * @param {String} key The key to drop data for in the storage. * @param {Function=} callback The method to call when the data is dropped. */ Persist.prototype.drop = function (key, callback) { switch (this.mode()) { case 'localforage': localforage.removeItem(key, function (err) { if (callback) { callback(err); } }); break; default: if (callback) { callback('No data handler or unrecognised data type.'); } break; } }; /** * Determines the byte size of a String, also taking special chars into account. * Inspired by http://codereview.stackexchange.com/questions/37512/count-byte-length-of-string * @param {String} string String to get bytes size of * @return {Number} Size in bytes of the input string * @private */ Persist.prototype._calcSize = function (string) { var iCount = 0, stringPrep = String(string || ""), stringLength = stringPrep.length, iPartCount, i; for (i = 0; i < stringLength; i++) { iPartCount = encodeURI(stringPrep[i]).split("%").length; iCount += iPartCount === 1 ? 1 : iPartCount - 1; } return iCount; }; /** * Argument to the persistedSize callback function * @typedef {Object} persistedSizeCallbackArg * @property {number} total The total size of either the collection or the DB in byte * @property {Array[]} collections per Array item: collection name and size as an Array tupel: collectionName, size */ /** * Callback for persisted size functions * * @callback persistedSizeCallback * @param {persistedSizeCallbackArg} */ /** * determine byte size of a persisted object, * either entire DB or specific collection * @param {string|object} target either a string (collection name) or an object (DB) * @param {persistedSizeCallback} callback method to call when size determination is done - mandatory for obtaining a result! */ Persist.prototype.persistedSize = function (target, callback) { var self = this, resultMap; // mapping: { // total: $totalsize // collections: [ // [collection, bytesize] // .... 
// ] // } resultMap = { total: 0, collections: [] }; /** * named helper function to map-reduce the total byte size of a persisted object * and to log each collections individual size * @param {String} value * @param {String} key * @private */ function _mapSizeCb (value, key) { // cont only if collection contains content if (value !== null) { // Both key and value of the persisted storage obj count toward total size var atomicTotal = self._calcSize(value) + self._calcSize(key); // Make an entry: collection (key) is $total bytes resultMap.collections.push([key, atomicTotal]); // Count towards total size only if value != null resultMap.total += atomicTotal; } } switch (this.mode()) { case 'localforage': // In general: don't do any decoding (enc, compression), // b/c only the "raw" size of the storage object is of interest switch ((typeof target).toLowerCase()) { case 'string': // Collection name was provided // Utilize localforages built-in Promise functionality (includes // shim for IE) get collection + collection meta data for size // determination localforage.getItem(target).then(function (value) { _mapSizeCb(value, target); }).then(function () { return localforage.getItem(target + "-metaData"); }).then(function (value) { _mapSizeCb(value, target + "-metaData"); }).then(function () { // done - report back if (callback) { callback(null, resultMap); } })['catch'](function (err) { console.error(JSON.stringify(err)); if (callback) { callback(err); } }); break; case 'object': // DB object was provided // determine size of DB // by iterating over all key/value pairs // filtering for this DB // we want to measure all persisted collections of a DB // independent of whether they're attached to the DB // at runtime via db.collection(name) localforage.iterate(function (value, key) { if ((key.lastIndexOf(target.name(), 0) === 0) && value !== null) { _mapSizeCb(value, key); } }).then(function () { // done - report back callback(null, resultMap); })['catch'](function (err) { console.error(JSON.stringify(err)); if (callback) { callback(err); } }); break; default: if (callback) { callback("Couldn't determine target for size calculation - must be either a collection name (string) or a db reference (object)"); } } break; default: if (callback) { callback('No data handler or unrecognised data type.'); } break; } }; // Extend the Collection prototype with persist methods Collection.prototype.drop = new Overload({ /** * Drop collection and persistent storage. * @name drop * @method Collection.drop */ '': function () { if (!this.isDropped()) { this.drop(true); } }, /** * Drop collection and persistent storage with callback. * @name drop * @method Collection.drop * @param {Function} callback Callback method. */ 'function': function (callback) { if (!this.isDropped()) { this.drop(true, callback); } }, /** * Drop collection and optionally drop persistent storage. * @name drop * @method Collection.drop * @param {Boolean} removePersistent True to drop persistent storage, false to keep it. 
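* @example
* // Usage sketch; "users" is a placeholder collection attached to a
* // database with the Persist module loaded.
* var users = db.collection('users');
* users.drop(true); // drops the collection AND its persisted data
* // ...or keep the persisted copy and be notified when done:
* // users.drop(false, function (err) { ... });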
*/ 'boolean': function (removePersistent) { if (!this.isDropped()) { // Remove persistent storage if (removePersistent) { if (this._name) { if (this._db) { // Drop the collection data from storage this._db.persist.drop(this._db._name + '-' + this._name); this._db.persist.drop(this._db._name + '-' + this._name + '-metaData'); } } else { throw('ForerunnerDB.Persist: Cannot drop a collection\'s persistent storage when no name assigned to collection!'); } } // Call the original method CollectionDrop.call(this); } }, /** * Drop collections and optionally drop persistent storage with callback. * @name drop * @method Collection.drop * @param {Boolean} removePersistent True to drop persistent storage, false to keep it. * @param {Function} callback Callback method. */ 'boolean, function': function (removePersistent, callback) { var self = this; if (!this.isDropped()) { // Remove persistent storage if (removePersistent) { if (this._name) { if (this._db) { // Drop the collection data from storage this._db.persist.drop(this._db._name + '-' + this._name, function () { self._db.persist.drop(self._db._name + '-' + self._name + '-metaData', callback); }); return CollectionDrop.call(this); } else { if (callback) { callback('Cannot drop a collection\'s persistent storage when the collection is not attached to a database!'); } } } else { if (callback) { callback('Cannot drop a collection\'s persistent storage when no name assigned to collection!'); } } } else { // Call the original method return CollectionDrop.call(this, callback); } } } }); /** * Saves an entire collection's data to persistent storage. * @param {Function=} callback The method to call when the save function * has completed. */ Collection.prototype.save = function (callback) { var self = this, processSave; if (self._name) { if (self._db) { processSave = function () { // Save the collection data self._db.persist.save(self._db._name + '-' + self._name, self._data, function (err, tableData, tableStats) { if (err) { if (callback) { callback(err); } return; } self._db.persist.save(self._db._name + '-' + self._name + '-metaData', self.metaData(), function (err, metaData, metaStats) { self.deferEmit('save', tableStats, metaStats, { tableData: tableData, metaData: metaData, tableDataName: self._db._name + '-' + self._name, metaDataName: self._db._name + '-' + self._name + '-metaData' }); if (callback) { // Defer till the next VM tick to give the VM some breathing room // around the persistent data getting into storage. 
setTimeout(function () { callback(err, tableStats, metaStats, { tableData: tableData, metaData: metaData, tableDataName: self._db._name + '-' + self._name, metaDataName: self._db._name + '-' + self._name + '-metaData' }); }, 1); } }); }); }; // Check for processing queues if (self.isProcessingQueue()) { // Hook queue complete to process save self.on('queuesComplete', function () { processSave(); }); } else { // Process save immediately processSave(); } } else { if (callback) { callback('Cannot save a collection that is not attached to a database!'); } } } else { if (callback) { callback('Cannot save a collection with no assigned name!'); } } }; /** * Determines the byte size of a persisted collection * @param {persistedSizeCallback} callback The method to call when the size check is complete */ Collection.prototype.persistedSize = function (callback) { var self = this; if (self._name) { if (self._db) { self._db.persist.persistedSize(self._db._name + '-' + self._name, callback); } else { if (callback) { callback('Cannot determine persisted size of a collection that is not attached to a database!'); } } } else { if (callback) { callback('Cannot determine persisted size of a collection with no assigned name!'); } } }; /** * Loads an entire collection's data from persistent storage. * @param {Function=} callback The method to call when the load function * has completed. */ Collection.prototype.load = function (callback) { var self = this; if (self._name) { if (self._db) { // Load the collection data self._db.persist.load(self._db._name + '-' + self._name, function (err, data, tableStats) { if (!err) { // Remove all previous data self.remove({}, function () { // Now insert the new data data = data || []; self.insert(data, function () { // Now load the collection's metadata self._db.persist.load(self._db._name + '-' + self._name + '-metaData', function (err, data, metaStats) { if (!err) { if (data) { self.metaData(data); } } self.deferEmit('load', tableStats, metaStats); if (callback) { callback(err, tableStats, metaStats); } }); }); //self.setData(data); }); } else { self.deferEmit('load', tableStats); if (callback) { callback(err); } } }); } else { if (callback) { callback('Cannot load a collection that is not attached to a database!'); } } } else { if (callback) { callback('Cannot load a collection with no assigned name!'); } } }; /** * Gets the data that represents this collection for easy storage using * a third-party method / plugin instead of using the standard persistent * storage system. * @param {Function} callback The method to call with the data response. 
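* @example
* // Usage sketch; how the returned object is stored (ajax, file, etc.) is
* // up to the caller, and "coll" is a placeholder collection.
* coll.saveCustom(function (err, customData) {
*     if (!err) {
*         // customData.data.store and customData.metaData.store hold the
*         // encoded strings - persist them yourself, then later restore:
*         coll.loadCustom(customData, function (err, tableStats, metaStats) {
*             // Collection documents and metadata are back in place
*         });
*     }
* });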
*/ Collection.prototype.saveCustom = function (callback) { var self = this, myData = {}, processSave; if (self._name) { if (self._db) { processSave = function () { self.encode(self._data, function (err, data, tableStats) { if (!err) { myData.data = { name: self._db._name + '-' + self._name, store: data, tableStats: tableStats }; self.encode(self._data, function (err, data, tableStats) { if (!err) { myData.metaData = { name: self._db._name + '-' + self._name + '-metaData', store: data, tableStats: tableStats }; callback(false, myData); } else { callback(err); } }); } else { callback(err); } }); }; // Check for processing queues if (self.isProcessingQueue()) { // Hook queue complete to process save self.on('queuesComplete', function () { processSave(); }); } else { // Process save immediately processSave(); } } else { if (callback) { callback('Cannot save a collection that is not attached to a database!'); } } } else { if (callback) { callback('Cannot save a collection with no assigned name!'); } } }; /** * Loads custom data loaded by a third-party plugin into the collection. * @param {Object} myData Data object previously saved by using saveCustom() * @param {Function} callback A callback method to receive notification when * data has loaded. */ Collection.prototype.loadCustom = function (myData, callback) { var self = this; if (self._name) { if (self._db) { if (myData.data && myData.data.store) { if (myData.metaData && myData.metaData.store) { self.decode(myData.data.store, function (err, data, tableStats) { if (!err) { if (data) { // Remove all previous data self.remove({}); self.insert(data); self.decode(myData.metaData.store, function (err, data, metaStats) { if (!err) { if (data) { self.metaData(data); if (callback) { callback(err, tableStats, metaStats); } } } else { callback(err); } }); } } else { callback(err); } }); } else { callback('No "metaData" key found in passed object!'); } } else { callback('No "data" key found in passed object!'); } } else { if (callback) { callback('Cannot load a collection that is not attached to a database!'); } } } else { if (callback) { callback('Cannot load a collection with no assigned name!'); } } }; // Override the DB init to instantiate the plugin Db.prototype.init = function () { DbInit.apply(this, arguments); this.persist = new Persist(this); }; Db.prototype.load = new Overload({ /** * Loads an entire database's data from persistent storage. * @name load * @method Db.load * @param {Function=} callback The method to call when the load function * has completed. */ 'function': function (callback) { this.$main.call(this, undefined, callback); }, /** * Loads an entire database's data from persistent storage. * @name load * @method Db.load * @param {Object} myData Custom data to load into the collection. * @param {Function} callback The method to call when the load function * has completed. 
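* @example
* // Persist the whole database and read it back later; "db" is a
* // placeholder Db instance with persistence configured.
* db.save(function (err) {
*     if (!err) {
*         db.load(function (err) {
*             // Every named collection has been re-populated from storage
*         });
*     }
* });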
*/ 'object, function': function (myData, callback) { this.$main.call(this, myData, callback); }, '$main': function (myData, callback) { // Loop the collections in the database var self = this, obj, keys, keyCount, loadCallback, index; obj = this._collection; keys = Object.keys(obj); keyCount = keys.length; if (keyCount <= 0) { return callback(false); } loadCallback = function (err) { if (!err) { keyCount--; if (keyCount <= 0) { self.deferEmit('load'); if (callback) { callback(false); } } } else { if (callback) { callback(err); } } }; for (index in obj) { if (obj.hasOwnProperty(index)) { // Call the collection load method if (!myData) { obj[index].load(loadCallback); } else { obj[index].loadCustom(myData, loadCallback); } } } } }); Db.prototype.save = new Overload({ /** * Saves an entire database's data to persistent storage. * @name save * @method Db.save * @param {Function=} callback The method to call when the save function * has completed. */ 'function': function (callback) { this.$main.call(this, {}, callback); }, /** * Saves an entire database's data to persistent storage. * @name save * @method Db.save * @param {Object} options The options object. * @param {Function} callback The method to call when the save function * has completed. */ 'object, function': function (options, callback) { this.$main.call(this, options, callback); }, '$main': function (options, callback) { // Loop the collections in the database var self = this, obj, keys, keyCount, saveCallback, index; obj = this._collection; keys = Object.keys(obj); keyCount = keys.length; if (keyCount <= 0) { return callback(false); } saveCallback = function (err) { if (!err) { keyCount--; if (keyCount <= 0) { self.deferEmit('save'); if (callback) { callback(false); } } } else { if (callback) { callback(err); } } }; for (index in obj) { if (obj.hasOwnProperty(index)) { // Call the collection save method if (!options.custom) { obj[index].save(saveCallback); } else { obj[index].saveCustom(saveCallback); } } } } }); /** * Determines the byte size of a persisted DB * @param {persistedSizeCallback} callback The method to call when the size check is complete */ Db.prototype.persistedSize = function(callback) { this.persist.persistedSize(this, callback); }; Shared.finishModule('Persist'); module.exports = Persist; },{"./Collection":6,"./CollectionGroup":7,"./PersistCompress":36,"./PersistCrypto":37,"./Shared":40,"async":43,"localforage":78}],36:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), pako = _dereq_('pako'); var Plugin = function () { this.init.apply(this, arguments); }; Plugin.prototype.init = function (options) { }; Shared.mixin(Plugin.prototype, 'Mixin.Common'); Plugin.prototype.encode = function (val, meta, finished) { var wrapper = { data: val, type: 'fdbCompress', enabled: false }, before, after, compressedVal; // Compress the data before = val.length; compressedVal = pako.deflate(val, {to: 'string'}); after = compressedVal.length; // If the compressed version is smaller than the original, use it! 
if (after < before) { wrapper.data = compressedVal; wrapper.enabled = true; } meta.compression = { enabled: wrapper.enabled, compressedBytes: after, uncompressedBytes: before, effect: Math.round((100 / before) * after) + '%' }; finished(false, this.jStringify(wrapper), meta); }; Plugin.prototype.decode = function (wrapper, meta, finished) { var compressionEnabled = false, data; if (wrapper) { wrapper = this.jParse(wrapper); // Check if we need to decompress the string if (wrapper.enabled) { data = pako.inflate(wrapper.data, {to: 'string'}); compressionEnabled = true; } else { data = wrapper.data; compressionEnabled = false; } meta.compression = { enabled: compressionEnabled }; if (finished) { finished(false, data, meta); } } else { if (finished) { finished(false, data, meta); } } }; // Register this plugin with ForerunnerDB Shared.plugins.FdbCompress = Plugin; module.exports = Plugin; },{"./Shared":40,"pako":79}],37:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'), CryptoJS = _dereq_('crypto-js'); var Plugin = function () { this.init.apply(this, arguments); }; Plugin.prototype.init = function (options) { // Ensure at least a password is passed in options if (!options || !options.pass) { throw('Cannot initialise persistent storage encryption without a passphrase provided in the passed options object as the "pass" field.'); } this._algo = options.algo || 'AES'; this._pass = options.pass; }; Shared.mixin(Plugin.prototype, 'Mixin.Common'); /** * Gets / sets the current pass-phrase being used to encrypt / decrypt * data with the plugin. */ Shared.synthesize(Plugin.prototype, 'pass'); Plugin.prototype.stringify = function (cipherParams) { // create json object with ciphertext var jsonObj = { ct: cipherParams.ciphertext.toString(CryptoJS.enc.Base64) }; // optionally add iv and salt if (cipherParams.iv) { jsonObj.iv = cipherParams.iv.toString(); } if (cipherParams.salt) { jsonObj.s = cipherParams.salt.toString(); } // stringify json object return this.jStringify(jsonObj); }; Plugin.prototype.parse = function (jsonStr) { // parse json string var jsonObj = this.jParse(jsonStr); // extract ciphertext from json object, and create cipher params object var cipherParams = CryptoJS.lib.CipherParams.create({ ciphertext: CryptoJS.enc.Base64.parse(jsonObj.ct) }); // optionally extract iv and salt if (jsonObj.iv) { cipherParams.iv = CryptoJS.enc.Hex.parse(jsonObj.iv); } if (jsonObj.s) { cipherParams.salt = CryptoJS.enc.Hex.parse(jsonObj.s); } return cipherParams; }; Plugin.prototype.encode = function (val, meta, finished) { var self = this, wrapper = { type: 'fdbCrypto' }, encryptedVal; // Encrypt the data encryptedVal = CryptoJS[this._algo].encrypt(val, this._pass, { format: { stringify: function () { return self.stringify.apply(self, arguments); }, parse: function () { return self.parse.apply(self, arguments); } } }); wrapper.data = encryptedVal.toString(); wrapper.enabled = true; meta.encryption = { enabled: wrapper.enabled }; if (finished) { finished(false, this.jStringify(wrapper), meta); } }; Plugin.prototype.decode = function (wrapper, meta, finished) { var self = this, data; if (wrapper) { wrapper = this.jParse(wrapper); try { data = CryptoJS[this._algo].decrypt(wrapper.data, this._pass, { format: { stringify: function () { return self.stringify.apply(self, arguments); }, parse: function () { return self.parse.apply(self, arguments); } } }).toString(CryptoJS.enc.Utf8); if (!data) { // Crypto failed - CryptoJS just returns a blank string most of the time // if an 
incorrect password is used. Perhaps we should do this: // http://stackoverflow.com/questions/23188593/cryptojs-check-if-aes-passphrase-is-correct if (finished) { finished('Crypto failed to decrypt data, incorrect password?', data, meta); } return; } } catch (e) { if (finished) { finished('Crypto failed to decrypt data, incorrect password?', data, meta); } return; } if (finished) { finished(false, data, meta); } } else { if (finished) { finished(false, wrapper, meta); } } }; // Register this plugin with ForerunnerDB Shared.plugins.FdbCrypto = Plugin; module.exports = Plugin; },{"./Shared":40,"crypto-js":52}],38:[function(_dereq_,module,exports){ "use strict"; var Shared = _dereq_('./Shared'); /** * Provides chain reactor node linking so that a chain reaction can propagate * down a node tree. Effectively creates a chain link between the reactorIn and * reactorOut objects where a chain reaction from the reactorIn is passed through * the reactorProcess before being passed to the reactorOut object. Reactor * packets are only passed through to the reactorOut if the reactor IO method * chainSend is used. * @param {*} reactorIn An object that has the Mixin.ChainReactor methods mixed * in to it. Chain reactions that occur inside this object will be passed through * to the reactorOut object. * @param {*} reactorOut An object that has the Mixin.ChainReactor methods mixed * in to it. Chain reactions that occur in the reactorIn object will be passed * through to this object. * @param {Function} reactorProcess The processing method to use when chain * reactions occur. * @constructor */ var ReactorIO = function (reactorIn, reactorOut, reactorProcess) { if (reactorIn && reactorOut && reactorProcess) { this._reactorIn = reactorIn; this._reactorOut = reactorOut; this._chainHandler = reactorProcess; if (!reactorIn.chain) { throw('ForerunnerDB.ReactorIO: ReactorIO requires passed in and out objects to implement the ChainReactor mixin!'); } // Register the reactorIO with the input reactorIn.chain(this); // Register the output with the reactorIO this.chain(reactorOut); } else { throw('ForerunnerDB.ReactorIO: ReactorIO requires in, out and process arguments to instantiate!'); } }; Shared.addModule('ReactorIO', ReactorIO); /** * Drop a reactor IO object, breaking the reactor link between the in and out * reactor nodes. * @returns {boolean} */ ReactorIO.prototype.drop = function () { if (!this.isDropped()) { this._state = 'dropped'; // Remove links if (this._reactorIn) { this._reactorIn.unChain(this); } if (this._reactorOut) { this.unChain(this._reactorOut); } delete this._reactorIn; delete this._reactorOut; delete this._chainHandler; this.emit('drop', this); delete this._listeners; } return true; }; /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(ReactorIO.prototype, 'state'); Shared.mixin(ReactorIO.prototype, 'Mixin.Common'); Shared.mixin(ReactorIO.prototype, 'Mixin.ChainReactor'); Shared.mixin(ReactorIO.prototype, 'Mixin.Events'); Shared.finishModule('ReactorIO'); module.exports = ReactorIO; },{"./Shared":40}],39:[function(_dereq_,module,exports){ "use strict"; /** * Provides functionality to encode and decode JavaScript objects to strings * and back again. This differs from JSON.stringify and JSON.parse in that * special objects such as dates can be encoded to strings and back again * so that the reconstituted version of the string still contains a JavaScript * date object. 
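* @example
* // Sketch of using the class directly; ForerunnerDB normally drives it
* // through its common mixin, so the wiring below is illustrative only.
* var serialiser = new Serialiser(),
*     stamp = serialiser.convert(new Date()),
*     str = JSON.stringify({when: stamp}),
*     obj = JSON.parse(str, serialiser.reviver());
* // obj.when is a Date instance again rather than a plain ISO string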
* @constructor */ var Serialiser = function () { this.init.apply(this, arguments); }; Serialiser.prototype.init = function () { var self = this; this._encoder = []; this._decoder = []; // Handler for Date() objects this.registerHandler('$date', function (objInstance) { if (objInstance instanceof Date) { // Augment this date object with a new toJSON method objInstance.toJSON = function () { return "$date:" + this.toISOString(); }; // Tell the converter we have matched this object return true; } // Tell converter to keep looking, we didn't match this object return false; }, function (data) { if (typeof data === 'string' && data.indexOf('$date:') === 0) { return self.convert(new Date(data.substr(6))); } return undefined; }); // Handler for RegExp() objects this.registerHandler('$regexp', function (objInstance) { if (objInstance instanceof RegExp) { objInstance.toJSON = function () { return "$regexp:" + this.source.length + ":" + this.source + ":" + (this.global ? 'g' : '') + (this.ignoreCase ? 'i' : ''); /*return { source: this.source, params: '' + (this.global ? 'g' : '') + (this.ignoreCase ? 'i' : '') };*/ }; // Tell the converter we have matched this object return true; } // Tell converter to keep looking, we didn't match this object return false; }, function (data) { if (typeof data === 'string' && data.indexOf('$regexp:') === 0) { var dataStr = data.substr(8),//± lengthEnd = dataStr.indexOf(':'), sourceLength = Number(dataStr.substr(0, lengthEnd)), source = dataStr.substr(lengthEnd + 1, sourceLength), params = dataStr.substr(lengthEnd + sourceLength + 2); return self.convert(new RegExp(source, params)); } return undefined; }); }; Serialiser.prototype.registerHandler = function (handles, encoder, decoder) { if (handles !== undefined) { // Register encoder this._encoder.push(encoder); // Register decoder this._decoder.push(decoder); } }; Serialiser.prototype.convert = function (data) { // Run through converters and check for match var arr = this._encoder, i; for (i = 0; i < arr.length; i++) { if (arr[i](data)) { // The converter we called matched the object and converted it // so let's return it now. return data; } } // No converter matched the object, return the unaltered one return data; }; Serialiser.prototype.reviver = function () { var arr = this._decoder; return function (key, value) { // Check if we have a decoder method for this key var decodedData, i; for (i = 0; i < arr.length; i++) { decodedData = arr[i](value); if (decodedData !== undefined) { // The decoder we called matched the object and decoded it // so let's return it now. return decodedData; } } // No decoder, return basic value return value; }; }; module.exports = Serialiser; },{}],40:[function(_dereq_,module,exports){ "use strict"; var Overload = _dereq_('./Overload'); /** * A shared object that can be used to store arbitrary data between class * instances, and access helper methods. * @mixin */ var Shared = { version: '2.0.24', modules: {}, plugins: {}, index: {}, _synth: {}, /** * Adds a module to ForerunnerDB. * @memberof Shared * @param {String} name The name of the module. * @param {Function} module The module class. */ addModule: function (name, module) { // Store the module in the module registry this.modules[name] = module; // Tell the universe we are loading this module this.emit('moduleLoad', [name, module]); }, /** * Called by the module once all processing has been completed. Used to determine * if the module is ready for use by other modules. * @memberof Shared * @param {String} name The name of the module. 
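* @example
* // Registration pattern used by every module in this file; "MyModule"
* // is a placeholder name.
* var MyModule = function () {};
* Shared.addModule('MyModule', MyModule);
* Shared.mixin(MyModule.prototype, 'Mixin.Common');
* Shared.finishModule('MyModule');
* Shared.moduleFinished('MyModule', function (name, module) {
*     // Fires immediately because the module has already finished loading
* });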
*/ finishModule: function (name) { if (this.modules[name]) { // Set the finished loading flag to true this.modules[name]._fdbFinished = true; // Assign the module name to itself so it knows what it // is called if (this.modules[name].prototype) { this.modules[name].prototype.className = name; } else { this.modules[name].className = name; } this.emit('moduleFinished', [name, this.modules[name]]); } else { throw('ForerunnerDB.Shared: finishModule called on a module that has not been registered with addModule(): ' + name); } }, /** * Will call your callback method when the specified module has loaded. If the module * is already loaded the callback is called immediately. * @memberof Shared * @param {String} name The name of the module. * @param {Function} callback The callback method to call when the module is loaded. */ moduleFinished: function (name, callback) { if (this.modules[name] && this.modules[name]._fdbFinished) { if (callback) { callback(name, this.modules[name]); } } else { this.on('moduleFinished', callback); } }, /** * Determines if a module has been added to ForerunnerDB or not. * @memberof Shared * @param {String} name The name of the module. * @returns {Boolean} True if the module exists or false if not. */ moduleExists: function (name) { return Boolean(this.modules[name]); }, mixin: new Overload({ /** * Adds the properties and methods defined in the mixin to the passed * object. * @memberof Shared * @name mixin * @param {Object} obj The target object to add mixin key/values to. * @param {String} mixinName The name of the mixin to add to the object. */ 'object, string': function (obj, mixinName) { var mixinObj; if (typeof mixinName === 'string') { mixinObj = this.mixins[mixinName]; if (!mixinObj) { throw('ForerunnerDB.Shared: Cannot find mixin named: ' + mixinName); } } return this.$main.call(this, obj, mixinObj); }, /** * Adds the properties and methods defined in the mixin to the passed * object. * @memberof Shared * @name mixin * @param {Object} obj The target object to add mixin key/values to. * @param {Object} mixinObj The object containing the keys to mix into * the target object. */ 'object, *': function (obj, mixinObj) { return this.$main.call(this, obj, mixinObj); }, '$main': function (obj, mixinObj) { if (mixinObj && typeof mixinObj === 'object') { for (var i in mixinObj) { if (mixinObj.hasOwnProperty(i)) { obj[i] = mixinObj[i]; } } } return obj; } }), /** * Generates a generic getter/setter method for the passed method name. * @memberof Shared * @param {Object} obj The object to add the getter/setter to. * @param {String} name The name of the getter/setter to generate. * @param {Function=} extend A method to call before executing the getter/setter. * The existing getter/setter can be accessed from the extend method via the * $super e.g. this.$super(); */ synthesize: function (obj, name, extend) { this._synth[name] = this._synth[name] || function (val) { if (val !== undefined) { this['_' + name] = val; return this; } return this['_' + name]; }; if (extend) { var self = this; obj[name] = function () { var tmp = this.$super, ret; this.$super = self._synth[name]; ret = extend.apply(this, arguments); this.$super = tmp; return ret; }; } else { obj[name] = this._synth[name]; } }, /** * Allows a method to be overloaded. * @memberof Shared * @param arr * @returns {Function} * @constructor */ overload: Overload, /** * Define the mixins that other modules can use as required. 
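* @example
* // Sketch of the mixin / synthesize pattern used throughout this file;
* // "Widget" is a placeholder class.
* var Widget = function () {};
* Shared.mixin(Widget.prototype, 'Mixin.Events');
* Shared.synthesize(Widget.prototype, 'name');
* var w = new Widget();
* w.name('sidebar'); // setter form, returns w for chaining
* w.name();          // => 'sidebar' (getter form)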
* @memberof Shared */ mixins: { 'Mixin.Common': _dereq_('./Mixin.Common'), 'Mixin.Events': _dereq_('./Mixin.Events'), 'Mixin.ChainReactor': _dereq_('./Mixin.ChainReactor'), 'Mixin.CRUD': _dereq_('./Mixin.CRUD'), 'Mixin.Constants': _dereq_('./Mixin.Constants'), 'Mixin.Triggers': _dereq_('./Mixin.Triggers'), 'Mixin.Sorting': _dereq_('./Mixin.Sorting'), 'Mixin.Matching': _dereq_('./Mixin.Matching'), 'Mixin.Updating': _dereq_('./Mixin.Updating'), 'Mixin.Tags': _dereq_('./Mixin.Tags') } }; // Add event handling to shared Shared.mixin(Shared, 'Mixin.Events'); module.exports = Shared; },{"./Mixin.CRUD":20,"./Mixin.ChainReactor":21,"./Mixin.Common":22,"./Mixin.Constants":23,"./Mixin.Events":24,"./Mixin.Matching":25,"./Mixin.Sorting":26,"./Mixin.Tags":27,"./Mixin.Triggers":28,"./Mixin.Updating":29,"./Overload":32}],41:[function(_dereq_,module,exports){ /* jshint strict:false */ if (!Array.prototype.filter) { Array.prototype.filter = function(fun/*, thisArg*/) { if (this === void 0 || this === null) { throw new TypeError(); } var t = Object(this); var len = t.length >>> 0; // jshint ignore:line if (typeof fun !== 'function') { throw new TypeError(); } var res = []; var thisArg = arguments.length >= 2 ? arguments[1] : void 0; for (var i = 0; i < len; i++) { if (i in t) { var val = t[i]; // NOTE: Technically this should Object.defineProperty at // the next index, as push can be affected by // properties on Object.prototype and Array.prototype. // But that method's new, and collisions should be // rare, so use the more-compatible alternative. if (fun.call(thisArg, val, i, t)) { res.push(val); } } } return res; }; } if (typeof Object.create !== 'function') { Object.create = (function() { var Temp = function() {}; return function (prototype) { if (arguments.length > 1) { throw Error('Second argument not supported'); } if (typeof prototype !== 'object') { throw TypeError('Argument must be an object'); } Temp.prototype = prototype; var result = new Temp(); Temp.prototype = null; return result; }; })(); } // Production steps of ECMA-262, Edition 5, 15.4.4.14 // Reference: http://es5.github.io/#x15.4.4.14e if (!Array.prototype.indexOf) { Array.prototype.indexOf = function(searchElement, fromIndex) { var k; // 1. Let O be the result of calling ToObject passing // the this value as the argument. if (this === null) { throw new TypeError('"this" is null or not defined'); } var O = Object(this); // 2. Let lenValue be the result of calling the Get // internal method of O with the argument "length". // 3. Let len be ToUint32(lenValue). var len = O.length >>> 0; // jshint ignore:line // 4. If len is 0, return -1. if (len === 0) { return -1; } // 5. If argument fromIndex was passed let n be // ToInteger(fromIndex); else let n be 0. var n = +fromIndex || 0; if (Math.abs(n) === Infinity) { n = 0; } // 6. If n >= len, return -1. if (n >= len) { return -1; } // 7. If n >= 0, then Let k be n. // 8. Else, n<0, Let k be len - abs(n). // If k is less than 0, then let k be 0. k = Math.max(n >= 0 ? n : len - Math.abs(n), 0); // 9. Repeat, while k < len while (k < len) { // a. Let Pk be ToString(k). // This is implicit for LHS operands of the in operator // b. Let kPresent be the result of calling the // HasProperty internal method of O with argument Pk. // This step can be combined with c // c. If kPresent is true, then // i. Let elementK be the result of calling the Get // internal method of O with the argument ToString(k). // ii. 
Let same be the result of applying the // Strict Equality Comparison Algorithm to // searchElement and elementK. // iii. If same is true, return k. if (k in O && O[k] === searchElement) { return k; } k++; } return -1; }; } module.exports = {}; },{}],42:[function(_dereq_,module,exports){ "use strict"; // Import external names locally var Shared, Db, Collection, CollectionGroup, CollectionInit, DbInit, ReactorIO, ActiveBucket, Overload = _dereq_('./Overload'), Path, sharedPathSolver; Shared = _dereq_('./Shared'); /** * Creates a new view instance. * @param {String} name The name of the view. * @param {Object=} query The view's query. * @param {Object=} options An options object. * @constructor */ var View = function (name, query, options) { this.init.apply(this, arguments); }; Shared.addModule('View', View); Shared.mixin(View.prototype, 'Mixin.Common'); Shared.mixin(View.prototype, 'Mixin.Matching'); Shared.mixin(View.prototype, 'Mixin.ChainReactor'); Shared.mixin(View.prototype, 'Mixin.Constants'); //Shared.mixin(View.prototype, 'Mixin.Triggers'); Shared.mixin(View.prototype, 'Mixin.Tags'); Collection = _dereq_('./Collection'); CollectionGroup = _dereq_('./CollectionGroup'); ActiveBucket = _dereq_('./ActiveBucket'); ReactorIO = _dereq_('./ReactorIO'); CollectionInit = Collection.prototype.init; Db = Shared.modules.Db; DbInit = Db.prototype.init; Path = Shared.modules.Path; sharedPathSolver = new Path(); View.prototype.init = function (name, query, options) { var self = this; this.sharedPathSolver = sharedPathSolver; this._name = name; this._listeners = {}; this._querySettings = {}; this._debug = {}; this.query(query, options, false); this._collectionDroppedWrap = function () { self._collectionDropped.apply(self, arguments); }; this._data = new Collection(this.name() + '_internal'); }; /** * This reactor IO node is given data changes from source data and * then acts as a firewall process between the source and view data. * Data needs to meet the requirements this IO node imposes before * the data is passed down the reactor chain (to the view). This * allows us to intercept data changes from the data source and act * on them such as applying transforms, checking the data matches * the view's query, applying joins to the data etc before sending it * down the reactor chain via the this.chainSend() calls. * * Update packets are especially complex to handle because an update * on the underlying source data could translate into an insert, * update or remove call on the view. Take a scenario where the view's * query limits the data seen from the source. If the source data is * updated and the data now falls inside the view's query limitations * the data is technically now an insert on the view, not an update. * The same is true in reverse where the update becomes a remove. If * the updated data already exists in the view and will still exist * after the update operation then the update can remain an update. * @param {Object} chainPacket The chain reactor packet representing the * data operation that has just been processed on the source data. * @param {View} self The reference to the view we are operating for. * @private */ View.prototype._handleChainIO = function (chainPacket, self) { var type = chainPacket.type, hasActiveJoin, hasActiveQuery, hasTransformIn, sharedData; // NOTE: "self" in this context is the view instance. 
// NOTE: "this" in this context is the ReactorIO node sitting in // between the source (sender) and the destination (listener) and // in this case the source is the view's "from" data source and the // destination is the view's _data collection. This means // that "this.chainSend()" is asking the ReactorIO node to send the // packet on to the destination listener. // EARLY EXIT: Check that the packet is not a CRUD operation if (type !== 'setData' && type !== 'insert' && type !== 'update' && type !== 'remove') { // This packet is NOT a CRUD operation packet so exit early! // Returning false informs the chain reactor to continue propagation // of the chain packet down the graph tree return false; } // We only need to check packets under three conditions // 1) We have a limiting query on the view "active query", // 2) We have a query options with a $join clause on the view "active join" // 3) We have a transformIn operation registered on the view. // If none of these conditions exist we can just allow the chain // packet to proceed as normal hasActiveJoin = Boolean(self._querySettings.options && self._querySettings.options.$join); hasActiveQuery = Boolean(self._querySettings.query); hasTransformIn = self._data._transformIn !== undefined; // EARLY EXIT: Check for any complex operation flags and if none // exist, send the packet on and exit early if (!hasActiveJoin && !hasActiveQuery && !hasTransformIn) { // We don't have any complex operation flags so exit early! // Returning false informs the chain reactor to continue propagation // of the chain packet down the graph tree return false; } // We have either an active query, active join or a transformIn // function registered on the view // We create a shared data object here so that the disparate method // calls can share data with each other via this object whilst // still remaining separate methods to keep code relatively clean. sharedData = { dataArr: [], removeArr: [] }; // Check the packet type to get the data arrays to work on if (chainPacket.type === 'insert') { // Check if the insert data is an array if (chainPacket.data.dataSet instanceof Array) { // Use the insert data array sharedData.dataArr = chainPacket.data.dataSet; } else { // Generate an array from the single insert object sharedData.dataArr = [chainPacket.data.dataSet]; } } else if (chainPacket.type === 'update') { // Use the dataSet array sharedData.dataArr = chainPacket.data.dataSet; } else if (chainPacket.type === 'remove') { if (chainPacket.data.dataSet instanceof Array) { // Use the remove data array sharedData.removeArr = chainPacket.data.dataSet; } else { // Generate an array from the single remove object sharedData.removeArr = [chainPacket.data.dataSet]; } } // Safety check if (!(sharedData.dataArr instanceof Array)) { // This shouldn't happen, let's log it console.warn('WARNING: dataArr being processed by chain reactor in View class is inconsistent!'); sharedData.dataArr = []; } if (!(sharedData.removeArr instanceof Array)) { // This shouldn't happen, let's log it console.warn('WARNING: removeArr being processed by chain reactor in View class is inconsistent!'); sharedData.removeArr = []; } // We need to operate in this order: // 1) Check if there is an active join - active joins are operated // against the SOURCE data. The joined data can potentially be // utilised by any active query or transformIn so we do this step first. 
// 2) Check if there is an active query - this is a query that is run // against the SOURCE data after any active joins have been resolved // on the source data. This allows an active query to operate on data // that would only exist after an active join has been executed. // If the source data does not fall inside the limiting factors of the // active query then we add it to a removal array. If it does fall // inside the limiting factors when we add it to an upsert array. This // is because data that falls inside the query could end up being // either new data or updated data after a transformIn operation. // 3) Check if there is a transformIn function. If a transformIn function // exist we run it against the data after doing any active join and // active query. if (hasActiveJoin) { if (this.debug()) { console.time(this.logIdentifier() + ' :: _handleChainIO_ActiveJoin'); } self._handleChainIO_ActiveJoin(chainPacket, sharedData); if (this.debug()) { console.timeEnd(this.logIdentifier() + ' :: _handleChainIO_ActiveJoin'); } } if (hasActiveQuery) { if (this.debug()) { console.time(this.logIdentifier() + ' :: _handleChainIO_ActiveQuery'); } self._handleChainIO_ActiveQuery(chainPacket, sharedData); if (this.debug()) { console.timeEnd(this.logIdentifier() + ' :: _handleChainIO_ActiveQuery'); } } if (hasTransformIn) { if (this.debug()) { console.time(this.logIdentifier() + ' :: _handleChainIO_TransformIn'); } self._handleChainIO_TransformIn(chainPacket, sharedData); if (this.debug()) { console.timeEnd(this.logIdentifier() + ' :: _handleChainIO_TransformIn'); } } // Check if we still have data to operate on and exit // if there is none left if (!sharedData.dataArr.length && !sharedData.removeArr.length) { // There is no more data to operate on, exit without // sending any data down the chain reactor (return true // will tell reactor to exit without continuing)! return true; } // Grab the public data collection's primary key sharedData.pk = self._data.primaryKey(); // We still have data left, let's work out how to handle it // first let's loop through the removals as these are easy if (sharedData.removeArr.length) { if (this.debug()) { console.time(this.logIdentifier() + ' :: _handleChainIO_RemovePackets'); } self._handleChainIO_RemovePackets(this, chainPacket, sharedData); if (this.debug()) { console.timeEnd(this.logIdentifier() + ' :: _handleChainIO_RemovePackets'); } } if (sharedData.dataArr.length) { if (this.debug()) { console.time(this.logIdentifier() + ' :: _handleChainIO_UpsertPackets'); } self._handleChainIO_UpsertPackets(this, chainPacket, sharedData); if (this.debug()) { console.timeEnd(this.logIdentifier() + ' :: _handleChainIO_UpsertPackets'); } } // Now return true to tell the chain reactor not to propagate // the data itself as we have done all that work here return true; }; View.prototype._handleChainIO_ActiveJoin = function (chainPacket, sharedData) { var dataArr = sharedData.dataArr, removeArr; // Since we have an active join, all we need to do is operate // the join clause on each item in the packet's data array. removeArr = this.applyJoin(dataArr, this._querySettings.options.$join, {}, {}); // Now that we've run our join keep in mind that joins can exclude data // if there is no matching joined data and the require: true clause in // the join options is enabled. This means we have to store a removal // array that tells us which items from the original data we sent to // join did not match the join data and were set with a require flag. 
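// (removeArr below holds whatever applyJoin() flagged as non-matching for a
// required join; those entries are spliced out of the working dataArr and
// appended to sharedData.removeArr so the later removal pass in
// _handleChainIO can emit the corresponding 'remove' packets.)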
// Now that we have our array of items to remove, let's run through the // original data and remove them from there. this.spliceArrayByIndexList(dataArr, removeArr); // Make sure we add any items we removed to the shared removeArr sharedData.removeArr = sharedData.removeArr.concat(removeArr); }; View.prototype._handleChainIO_ActiveQuery = function (chainPacket, sharedData) { var self = this, dataArr = sharedData.dataArr, i; // Now we need to run the data against the active query to // see if the data should be in the final data list or not, // so we use the _match method. // Loop backwards so we can safely splice from the array // while we are looping for (i = dataArr.length - 1; i >= 0; i--) { if (!self._match(dataArr[i], self._querySettings.query, self._querySettings.options, 'and', {})) { // The data didn't match the active query, add it // to the shared removeArr sharedData.removeArr.push(dataArr[i]); // Now remove it from the shared dataArr dataArr.splice(i, 1); } } }; View.prototype._handleChainIO_TransformIn = function (chainPacket, sharedData) { var self = this, dataArr = sharedData.dataArr, removeArr = sharedData.removeArr, dataIn = self._data._transformIn, i; // At this stage we take the remaining items still left in the data // array and run our transformIn method on each one, modifying it // from what it was to what it should be on the view. We also have // to run this on items we want to remove too because transforms can // affect primary keys and therefore stop us from identifying the // correct items to run removal operations on. // It is important that these are transformed BEFORE they are passed // to the CRUD methods because we use the CU data to check the position // of the item in the array and that can only happen if it is already // pre-transformed. The removal stuff also needs pre-transformed // because ids can be modified by a transform. for (i = 0; i < dataArr.length; i++) { // Assign the new value dataArr[i] = dataIn(dataArr[i]); } for (i = 0; i < removeArr.length; i++) { // Assign the new value removeArr[i] = dataIn(removeArr[i]); } }; View.prototype._handleChainIO_RemovePackets = function (ioObj, chainPacket, sharedData) { var $or = [], pk = sharedData.pk, removeArr = sharedData.removeArr, packet = { dataSet: removeArr, query: { $or: $or } }, orObj, i; for (i = 0; i < removeArr.length; i++) { orObj = {}; orObj[pk] = removeArr[i][pk]; $or.push(orObj); } ioObj.chainSend('remove', packet); }; View.prototype._handleChainIO_UpsertPackets = function (ioObj, chainPacket, sharedData) { var data = this._data, primaryIndex = data._primaryIndex, primaryCrc = data._primaryCrc, pk = sharedData.pk, dataArr = sharedData.dataArr, arrItem, insertArr = [], updateArr = [], query, i; // Let's work out what type of operation this data should // generate between an insert or an update. 
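// (Classification used by the loop below: a primary key missing from the
// target collection's primary index queues the item for insert; a key that
// exists but whose stored CRC differs from the hash computed here queues it
// for update; a key that exists with a matching hash is skipped as
// unchanged. Inserts are then sent as a single batched packet while updates
// are sent one packet per item.)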
for (i = 0; i < dataArr.length; i++) { arrItem = dataArr[i]; // Check if the data already exists in the data if (primaryIndex.get(arrItem[pk])) { // Matching item exists, check if the data is the same if (primaryCrc.get(arrItem[pk]) !== this.hash(arrItem[pk])) { // The document exists in the data collection but data differs, update required updateArr.push(arrItem); } } else { // The document is missing from this collection, insert required insertArr.push(arrItem); } } if (insertArr.length) { ioObj.chainSend('insert', { dataSet: insertArr }); } if (updateArr.length) { for (i = 0; i < updateArr.length; i++) { arrItem = updateArr[i]; query = {}; query[pk] = arrItem[pk]; ioObj.chainSend('update', { query: query, update: arrItem, dataSet: [arrItem] }); } } }; /** * Executes an insert against the view's underlying data-source. * @see Collection::insert() */ View.prototype.insert = function () { this._from.insert.apply(this._from, arguments); }; /** * Executes an update against the view's underlying data-source. * @see Collection::update() */ View.prototype.update = function (query, update, options, callback) { var finalQuery = { $and: [this.query(), query] }; this._from.update.call(this._from, finalQuery, update, options, callback); }; /** * Executes an updateById against the view's underlying data-source. * @see Collection::updateById() */ View.prototype.updateById = function () { this._from.updateById.apply(this._from, arguments); }; /** * Executes a remove against the view's underlying data-source. * @see Collection::remove() */ View.prototype.remove = function () { this._from.remove.apply(this._from, arguments); }; /** * Queries the view data. * @see Collection::find() * @returns {Array} The result of the find query. */ View.prototype.find = function (query, options) { return this._data.find(query, options); }; /** * Queries the view data for a single document. * @see Collection::findOne() * @returns {Object} The result of the find query. */ View.prototype.findOne = function (query, options) { return this._data.findOne(query, options); }; /** * Queries the view data by specific id. * @see Collection::findById() * @returns {Array} The result of the find query. */ View.prototype.findById = function (id, options) { return this._data.findById(id, options); }; /** * Queries the view data in a sub-array. * @see Collection::findSub() * @returns {Array} The result of the find query. */ View.prototype.findSub = function (match, path, subDocQuery, subDocOptions) { return this._data.findSub(match, path, subDocQuery, subDocOptions); }; View.prototype._findSub = function (docArr, path, subDocQuery, subDocOptions) { return this._data._findSub(docArr, path, subDocQuery, subDocOptions); }; /** * Queries the view data in a sub-array and returns first match. * @see Collection::findSubOne() * @returns {Object} The result of the find query. */ View.prototype.findSubOne = function (match, path, subDocQuery, subDocOptions) { return this._data.findSubOne(match, path, subDocQuery, subDocOptions); }; View.prototype._findSubOne = function (docArr, path, subDocQuery, subDocOptions) { return this._data._findSubOne(docArr, path, subDocQuery, subDocOptions); }; /** * Gets the module's internal data collection. * @returns {Collection} */ View.prototype.data = function () { return this._data; }; /** * Sets the source from which the view will assemble its data. * @param {Collection|View} source The source to use to assemble view data. * @param {Function=} callback A callback method. 
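* @example
* // A hedged usage sketch - "db", "users" and "activeUsers" are assumed
* // example names and are not defined anywhere in this file.
* var users = db.collection('users'),
*     activeUsers = db.view('activeUsers');
* activeUsers.query({active: true}, false); // set the query, defer refresh
* activeUsers.from(users, function () {
*     console.log(activeUsers.find()); // the view now mirrors matching docs
* });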
* @returns {*} If no argument is passed, returns the current value of from, * otherwise returns itself for chaining. */ View.prototype.from = function (source, callback) { var self = this; if (source !== undefined) { // Check if we have an existing from if (this._from) { // Remove the listener to the drop event this._from.off('drop', this._collectionDroppedWrap); // Remove the current reference to the _from since we // are about to replace it with a new one delete this._from; } // Check if we have an existing reactor io that links the // previous _from source to the view's internal data if (this._io) { // Drop the io and remove it this._io.drop(); delete this._io; } // Check if we were passed a source name rather than a // reference to a source object if (typeof(source) === 'string') { // We were passed a name, assume it is a collection and // get the reference to the collection of that name source = this._db.collection(source); } // Check if we were passed a reference to a view rather than // a collection. Views need to be handled slightly differently // since their data is stored in an internal data collection // rather than actually being a direct data source themselves. if (source.className === 'View') { // The source is a view so IO to the internal data collection // instead of the view proper source = source._data; if (this.debug()) { console.log(this.logIdentifier() + ' Using internal data "' + source.instanceIdentifier() + '" for IO graph linking'); } } // Assign the new data source as the view's _from this._from = source; // Hook the new data source's drop event so we can unhook // it as a data source if it gets dropped. This is important // so that we don't run into problems using a dropped source // for active data. this._from.on('drop', this._collectionDroppedWrap); // Create a new reactor IO graph node that intercepts chain packets from the // view's _from source and determines how they should be interpreted by // this view. See the _handleChainIO() method which does all the chain packet // processing for the view. this._io = new ReactorIO(this._from, this, function (chainPacket) { return self._handleChainIO.call(this, chainPacket, self); }); // Set the view's internal data primary key to the same as the // current active _from data source this._data.primaryKey(source.primaryKey()); // Do the initial data lookup and populate the view's internal data // since at this point we don't actually have any data in the view // yet. var collData = source.find(this._querySettings.query, this._querySettings.options); this._data.setData(collData, {}, callback); // If we have an active query and that query has an $orderBy clause, // update our active bucket which allows us to keep track of where // data should be placed in our internal data array. This is about // ordering of data and making sure that we maintain an ordered array // so that if we have data-binding we can place an item in the data- // bound view at the correct location. Active buckets use quick-sort // algorithms to quickly determine the position of an item inside an // existing array based on a sort protocol. if (this._querySettings.options && this._querySettings.options.$orderBy) { this.rebuildActiveBucket(this._querySettings.options.$orderBy); } else { this.rebuildActiveBucket(); } return this; } return this._from; }; /** * The chain reaction handler method for the view. * @param {Object} chainPacket The chain reaction packet to handle. 
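* Handles 'setData', 'insert', 'update' and 'remove' packets arriving down
* the chain from the view's data source (via the ReactorIO node created in
* from()). When the view's query options contain an $orderBy clause the
* active bucket is consulted so each affected document keeps its correct
* position in the internal (ordered) data collection.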
* @private */ View.prototype._chainHandler = function (chainPacket) { var //self = this, arr, count, index, insertIndex, updates, primaryKey, item, currentIndex; if (this.debug()) { console.log(this.logIdentifier() + ' Received chain reactor data: ' + chainPacket.type); } switch (chainPacket.type) { case 'setData': if (this.debug()) { console.log(this.logIdentifier() + ' Setting data in underlying (internal) view collection "' + this._data.name() + '"'); } // Get the new data from our underlying data source sorted as we want var collData = this._from.find(this._querySettings.query, this._querySettings.options); this._data.setData(collData); // Rebuild active bucket as well this.rebuildActiveBucket(this._querySettings.options); break; case 'insert': if (this.debug()) { console.log(this.logIdentifier() + ' Inserting some data into underlying (internal) view collection "' + this._data.name() + '"'); } // Decouple the data to ensure we are working with our own copy chainPacket.data.dataSet = this.decouple(chainPacket.data.dataSet); // Make sure we are working with an array if (!(chainPacket.data.dataSet instanceof Array)) { chainPacket.data.dataSet = [chainPacket.data.dataSet]; } if (this._querySettings.options && this._querySettings.options.$orderBy) { // Loop the insert data and find each item's index arr = chainPacket.data.dataSet; count = arr.length; for (index = 0; index < count; index++) { insertIndex = this._activeBucket.insert(arr[index]); this._data._insertHandle(arr[index], insertIndex); } } else { // Set the insert index to the passed index, or if none, the end of the view data array insertIndex = this._data._data.length; this._data._insertHandle(chainPacket.data.dataSet, insertIndex); } break; case 'update': if (this.debug()) { console.log(this.logIdentifier() + ' Updating some data in underlying (internal) view collection "' + this._data.name() + '"'); } primaryKey = this._data.primaryKey(); // Do the update updates = this._data._handleUpdate( chainPacket.data.query, chainPacket.data.update, chainPacket.data.options ); if (this._querySettings.options && this._querySettings.options.$orderBy) { // TODO: This would be a good place to improve performance by somehow // TODO: inspecting the change that occurred when update was performed // TODO: above and determining if it affected the order clause keys // TODO: and if not, skipping the active bucket updates here // Loop the updated items and work out their new sort locations count = updates.length; for (index = 0; index < count; index++) { item = updates[index]; // Remove the item from the active bucket (via it's id) currentIndex = this._activeBucket.remove(item); // Get the current location of the item //currentIndex = this._data._data.indexOf(item); // Add the item back in to the active bucket insertIndex = this._activeBucket.insert(item); if (currentIndex !== insertIndex) { // Move the updated item to the new index this._data._updateSpliceMove(this._data._data, currentIndex, insertIndex); } } } break; case 'remove': if (this.debug()) { console.log(this.logIdentifier() + ' Removing some data from underlying (internal) view collection "' + this._data.name() + '"'); } this._data.remove(chainPacket.data.query, chainPacket.options); if (this._querySettings.options && this._querySettings.options.$orderBy) { // Loop the dataSet and remove the objects from the ActiveBucket arr = chainPacket.data.dataSet; count = arr.length; for (index = 0; index < count; index++) { this._activeBucket.remove(arr[index]); } } break; default: break; } }; /** * 
Handles when an underlying collection the view is using as a data * source is dropped. * @param {Collection} collection The collection that has been dropped. * @private */ View.prototype._collectionDropped = function (collection) { if (collection) { // Collection was dropped, remove from view delete this._from; } }; /** * Creates an index on the view. * @see Collection::ensureIndex() * @returns {*} */ View.prototype.ensureIndex = function () { return this._data.ensureIndex.apply(this._data, arguments); }; /** * Listens for an event. * @see Mixin.Events::on() */ View.prototype.on = function () { return this._data.on.apply(this._data, arguments); }; /** * Cancels an event listener. * @see Mixin.Events::off() */ View.prototype.off = function () { return this._data.off.apply(this._data, arguments); }; /** * Emits an event. * @see Mixin.Events::emit() */ View.prototype.emit = function () { return this._data.emit.apply(this._data, arguments); }; /** * Defers emitting an event. * @see Mixin.Events::deferEmit() */ View.prototype.deferEmit = function () { return this._data.deferEmit.apply(this._data, arguments); }; /** * Finds the distinct values for a specified field across a single collection and * returns the results in an array. * @param {String} key The field path to return distinct values for e.g. "person.name". * @param {Object=} query The query to use to filter the documents used to return values from. * @param {Object=} options The query options to use when running the query. * @returns {Array} */ View.prototype.distinct = function (key, query, options) { return this._data.distinct(key, query, options); }; /** * Gets the primary key for this view from the assigned collection. * @see Collection::primaryKey() * @returns {String} */ View.prototype.primaryKey = function () { return this._data.primaryKey(); }; /** * @see Mixin.Triggers::addTrigger() */ View.prototype.addTrigger = function () { return this._data.addTrigger.apply(this._data, arguments); }; /** * @see Mixin.Triggers::removeTrigger() */ View.prototype.removeTrigger = function () { return this._data.removeTrigger.apply(this._data, arguments); }; /** * @see Mixin.Triggers::ignoreTriggers() */ View.prototype.ignoreTriggers = function () { return this._data.ignoreTriggers.apply(this._data, arguments); }; /** * @see Mixin.Triggers::addLinkIO() */ View.prototype.addLinkIO = function () { return this._data.addLinkIO.apply(this._data, arguments); }; /** * @see Mixin.Triggers::removeLinkIO() */ View.prototype.removeLinkIO = function () { return this._data.removeLinkIO.apply(this._data, arguments); }; /** * @see Mixin.Triggers::willTrigger() */ View.prototype.willTrigger = function () { return this._data.willTrigger.apply(this._data, arguments); }; /** * @see Mixin.Triggers::processTrigger() */ View.prototype.processTrigger = function () { return this._data.processTrigger.apply(this._data, arguments); }; /** * @see Mixin.Triggers::_triggerIndexOf() */ View.prototype._triggerIndexOf = function () { return this._data._triggerIndexOf.apply(this._data, arguments); }; /** * Drops a view and all its stored data from the database. * @param {Function=} callback A callback method. * @returns {boolean} True on success, false on failure.
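* @example
* // A hedged usage sketch - "activeUsers" is an assumed view reference.
* activeUsers.drop(function (err, success) {
*     // on success the callback receives (false, true)
* });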
*/ View.prototype.drop = function (callback) { if (!this.isDropped()) { if (this._from) { this._from.off('drop', this._collectionDroppedWrap); this._from._removeView(this); } if (this.debug() || (this._db && this._db.debug())) { console.log(this.logIdentifier() + ' Dropping'); } this._state = 'dropped'; // Clear io and chains if (this._io) { this._io.drop(); } // Drop the view's internal collection if (this._data) { this._data.drop(); } if (this._db && this._name) { delete this._db._view[this._name]; } this.emit('drop', this); if (callback) { callback(false, true); } delete this._chain; delete this._from; delete this._data; delete this._io; delete this._listeners; delete this._querySettings; delete this._db; return true; } return false; }; /** * Gets / sets the query object and query options that the view uses * to build its data set. This call modifies both the query and * query options at the same time. * @param {Object=} query The query to set. * @param {Object=} options The query options object. * @param {Boolean=} refresh Whether to refresh the view data after * this operation. Defaults to true. * @returns {*} * @deprecated Use query(query, options, refresh) instead. Query * now supports being presented with multiple different variations of * arguments. */ View.prototype.queryData = function (query, options, refresh) { if (query !== undefined) { this._querySettings.query = query; if (query.$findSub && !query.$findSub.$from) { query.$findSub.$from = this._data.name(); } if (query.$findSubOne && !query.$findSubOne.$from) { query.$findSubOne.$from = this._data.name(); } } if (options !== undefined) { this._querySettings.options = options; } if (query !== undefined || options !== undefined) { if (refresh === undefined || refresh === true) { this.refresh(); } } if (query !== undefined) { this.emit('queryChange', query); } if (options !== undefined) { this.emit('queryOptionsChange', options); } if (query !== undefined || options !== undefined) { return this; } return this.decouple(this._querySettings); }; /** * Adds data to the existing query. * @param {Object} obj The data whose keys will be added to the existing * query object. * @param {Boolean=} overwrite Whether or not to overwrite data that already * exists in the query object. Defaults to true. * @param {Boolean=} refresh Whether or not to refresh the view data set * once the operation is complete. Defaults to true. */ View.prototype.queryAdd = function (obj, overwrite, refresh) { this._querySettings.query = this._querySettings.query || {}; var query = this._querySettings.query, i; if (obj !== undefined) { // Loop object properties and add to existing query for (i in obj) { if (obj.hasOwnProperty(i)) { if (query[i] === undefined || (query[i] !== undefined && overwrite !== false)) { query[i] = obj[i]; } } } } if (refresh === undefined || refresh === true) { this.refresh(); } if (query !== undefined) { this.emit('queryChange', query); } }; /** * Removes data from the existing query. * @param {Object} obj The data whose keys will be removed from the existing * query object. * @param {Boolean=} refresh Whether or not to refresh the view data set * once the operation is complete. Defaults to true.
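* @example
* // A hedged usage sketch - assumes the view's current query contains the
* // keys shown, e.g. {active: true, archived: false}.
* myView.queryRemove({active: true});           // drop "active", then refresh
* myView.queryRemove({archived: false}, false); // drop "archived", no refresh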
*/ View.prototype.queryRemove = function (obj, refresh) { var query = this._querySettings.query, i; if (query) { if (obj !== undefined) { // Loop object properties and add to existing query for (i in obj) { if (obj.hasOwnProperty(i)) { delete query[i]; } } } if (refresh === undefined || refresh === true) { this.refresh(); } if (query !== undefined) { this.emit('queryChange', query); } } }; /** * Gets / sets the query being used to generate the view data. It * does not change or modify the view's query options. * @param {Object=} query The query to set. * @param {Boolean=} refresh Whether to refresh the view data after * this operation. Defaults to true. * @returns {*} */ View.prototype.query = new Overload({ '': function () { return this.decouple(this._querySettings.query); }, 'object': function (query) { return this.$main.call(this, query, undefined, true); }, '*, boolean': function (query, refresh) { return this.$main.call(this, query, undefined, refresh); }, 'object, object': function (query, options) { return this.$main.call(this, query, options, true); }, '*, *, boolean': function (query, options, refresh) { return this.$main.call(this, query, options, refresh); }, '$main': function (query, options, refresh) { if (query !== undefined) { this._querySettings.query = query; if (query.$findSub && !query.$findSub.$from) { query.$findSub.$from = this._data.name(); } if (query.$findSubOne && !query.$findSubOne.$from) { query.$findSubOne.$from = this._data.name(); } } if (options !== undefined) { this._querySettings.options = options; } if (query !== undefined || options !== undefined) { if (refresh === undefined || refresh === true) { this.refresh(); } } if (query !== undefined) { this.emit('queryChange', query); } if (options !== undefined) { this.emit('queryOptionsChange', options); } if (query !== undefined || options !== undefined) { return this; } return this.decouple(this._querySettings); } }); /** * Gets / sets the orderBy clause in the query options for the view. * @param {Object=} val The order object. * @returns {*} */ View.prototype.orderBy = function (val) { if (val !== undefined) { var queryOptions = this.queryOptions() || {}; queryOptions.$orderBy = val; this.queryOptions(queryOptions); return this; } return (this.queryOptions() || {}).$orderBy; }; /** * Gets / sets the page clause in the query options for the view. * @param {Number=} val The page number to change to (zero index). * @returns {*} */ View.prototype.page = function (val) { if (val !== undefined) { var queryOptions = this.queryOptions() || {}; // Only execute a query options update if page has changed if (val !== queryOptions.$page) { queryOptions.$page = val; this.queryOptions(queryOptions); } return this; } return (this.queryOptions() || {}).$page; }; /** * Jump to the first page in the data set. * @returns {*} */ View.prototype.pageFirst = function () { return this.page(0); }; /** * Jump to the last page in the data set. * @returns {*} */ View.prototype.pageLast = function () { var pages = this.cursor().pages, lastPage = pages !== undefined ? pages : 0; return this.page(lastPage - 1); }; /** * Move forward or backwards in the data set pages by passing a positive * or negative integer of the number of pages to move. * @param {Number} val The number of pages to move. * @returns {*} */ View.prototype.pageScan = function (val) { if (val !== undefined) { var pages = this.cursor().pages, queryOptions = this.queryOptions() || {}, currentPage = queryOptions.$page !== undefined ? 
queryOptions.$page : 0; currentPage += val; if (currentPage < 0) { currentPage = 0; } if (currentPage >= pages) { currentPage = pages - 1; } return this.page(currentPage); } }; /** * Gets / sets the query options used when applying sorting etc to the * view data set. * @param {Object=} options An options object. * @param {Boolean=} refresh Whether to refresh the view data after * this operation. Defaults to true. * @returns {*} */ View.prototype.queryOptions = function (options, refresh) { if (options !== undefined) { this._querySettings.options = options; if (options.$decouple === undefined) { options.$decouple = true; } if (refresh === undefined || refresh === true) { this.refresh(); } else { // TODO: This could be wasteful if the previous options $orderBy was identical, do a hash and check first! this.rebuildActiveBucket(options.$orderBy); } if (options !== undefined) { this.emit('queryOptionsChange', options); } return this; } return this._querySettings.options; }; /** * Clears the existing active bucket and builds a new one based * on the passed orderBy object (if one is passed). * @param {Object=} orderBy The orderBy object describing how to * order any data. */ View.prototype.rebuildActiveBucket = function (orderBy) { if (orderBy) { var arr = this._data._data, arrCount = arr.length; // Build a new active bucket this._activeBucket = new ActiveBucket(orderBy); this._activeBucket.primaryKey(this._data.primaryKey()); // Loop the current view data and add each item for (var i = 0; i < arrCount; i++) { this._activeBucket.insert(arr[i]); } } else { // Remove any existing active bucket delete this._activeBucket; } }; /** * Refreshes the view data such as ordering etc. */ View.prototype.refresh = function () { var self = this, refreshResults, joinArr, i, k; if (this._from) { // Clear the private data collection which will propagate to the public data // collection automatically via the chain reactor node between them this._data.remove(); // Grab all the data from the underlying data source refreshResults = this._from.find(this._querySettings.query, this._querySettings.options); this.cursor(refreshResults.$cursor); // Insert the underlying data into the private data collection this._data.insert(refreshResults); // Store the current cursor data this._data._data.$cursor = refreshResults.$cursor; } if (this._querySettings && this._querySettings.options && this._querySettings.options.$join && this._querySettings.options.$join.length) { // Define the change handler method self.__joinChange = self.__joinChange || function () { self._joinChange(); }; // Check for existing join collections if (this._joinCollections && this._joinCollections.length) { // Loop the join collections and remove change listeners // Loop the collections and hook change events for (i = 0; i < this._joinCollections.length; i++) { this._db.collection(this._joinCollections[i]).off('immediateChange', self.__joinChange); } } // Now start hooking any new / existing joins joinArr = this._querySettings.options.$join; this._joinCollections = []; // Loop the joined collections and hook change events for (i = 0; i < joinArr.length; i++) { for (k in joinArr[i]) { if (joinArr[i].hasOwnProperty(k)) { this._joinCollections.push(k); } } } if (this._joinCollections.length) { // Loop the collections and hook change events for (i = 0; i < this._joinCollections.length; i++) { this._db.collection(this._joinCollections[i]).on('immediateChange', self.__joinChange); } } } if (this._querySettings.options && this._querySettings.options.$orderBy) { 
this.rebuildActiveBucket(this._querySettings.options.$orderBy); } else { this.rebuildActiveBucket(); } return this; }; /** * Handles when a change has occurred on a collection that is joined * by query to this view. * @param objName * @param objType * @private */ View.prototype._joinChange = function (objName, objType) { this.emit('joinChange'); // TODO: This is a really dirty solution because it will require a complete // TODO: rebuild of the view data. We need to implement an IO handler to // TODO: selectively update the data of the view based on the joined // TODO: collection data operation. // FIXME: This isnt working, major performance killer, invest in some IO from chain reactor to make this a targeted call this.refresh(); }; /** * Returns the number of documents currently in the view. * @returns {Number} */ View.prototype.count = function () { return this._data.count.apply(this._data, arguments); }; // Call underlying View.prototype.subset = function () { return this._data.subset.apply(this._data, arguments); }; /** * Takes the passed data and uses it to set transform methods and globally * enable or disable the transform system for the view. * @param {Object} obj The new transform system settings "enabled", "dataIn" * and "dataOut": * { * "enabled": true, * "dataIn": function (data) { return data; }, * "dataOut": function (data) { return data; } * } * @returns {*} */ View.prototype.transform = function (obj) { var currentSettings, newSettings; currentSettings = this._data.transform(); this._data.transform(obj); newSettings = this._data.transform(); // Check if transforms are enabled, a dataIn method is set and these // settings did not match the previous transform settings if (newSettings.enabled && newSettings.dataIn && (currentSettings.enabled !== newSettings.enabled || currentSettings.dataIn !== newSettings.dataIn)) { // The data in the view is now stale, refresh it this.refresh(); } return newSettings; }; /** * Executes a method against each document that matches query and returns an * array of documents that may have been modified by the method. * @param {Object} query The query object. * @param {Function} func The method that each document is passed to. If this method * returns false for a particular document it is excluded from the results. * @param {Object=} options Optional options object. * @returns {Array} */ View.prototype.filter = function (query, func, options) { return this._data.filter(query, func, options); }; /** * Returns the non-transformed data the view holds as a collection * reference. * @return {Collection} The non-transformed collection reference. */ View.prototype.data = function () { return this._data; }; /** * @see Collection.indexOf * @returns {*} */ View.prototype.indexOf = function () { return this._data.indexOf.apply(this._data, arguments); }; /** * Gets / sets the db instance this class instance belongs to. * @param {Db=} db The db instance. * @memberof View * @returns {*} */ Shared.synthesize(View.prototype, 'db', function (db) { if (db) { this._data.db(db); // Apply the same debug settings this.debug(db.debug()); this._data.debug(db.debug()); } return this.$super.apply(this, arguments); }); /** * Gets / sets the current state. * @param {String=} val The name of the state to set. * @returns {*} */ Shared.synthesize(View.prototype, 'state'); /** * Gets / sets the current name. * @param {String=} val The new name to set. * @returns {*} */ Shared.synthesize(View.prototype, 'name'); /** * Gets / sets the current cursor. 
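* (Read internally by pageLast() and pageScan(), which use cursor().pages to
* clamp page navigation; refresh() stores the latest $cursor here.)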
* @param {String=} val The new cursor to set. * @returns {*} */ Shared.synthesize(View.prototype, 'cursor', function (val) { if (val === undefined) { return this._cursor || {}; } this.$super.apply(this, arguments); }); // Extend collection with view init Collection.prototype.init = function () { this._view = []; CollectionInit.apply(this, arguments); }; /** * Creates a view and assigns the collection as its data source. * @param {String} name The name of the new view. * @param {Object} query The query to apply to the new view. * @param {Object} options The options object to apply to the view. * @returns {*} */ Collection.prototype.view = function (name, query, options) { if (this._db && this._db._view ) { if (!this._db._view[name]) { var view = new View(name, query, options) .db(this._db) .from(this); this._view = this._view || []; this._view.push(view); return view; } else { throw(this.logIdentifier() + ' Cannot create a view using this collection because a view with this name already exists: ' + name); } } }; /** * Adds a view to the internal view lookup. * @param {View} view The view to add. * @returns {Collection} * @private */ Collection.prototype._addView = CollectionGroup.prototype._addView = function (view) { if (view !== undefined) { this._view.push(view); } return this; }; /** * Removes a view from the internal view lookup. * @param {View} view The view to remove. * @returns {Collection} * @private */ Collection.prototype._removeView = CollectionGroup.prototype._removeView = function (view) { if (view !== undefined) { var index = this._view.indexOf(view); if (index > -1) { this._view.splice(index, 1); } } return this; }; // Extend DB with views init Db.prototype.init = function () { this._view = {}; DbInit.apply(this, arguments); }; /** * Gets a view by it's name. * @param {String} name The name of the view to retrieve. * @returns {*} */ Db.prototype.view = function (name) { var self = this; // Handle being passed an instance if (name instanceof View) { return name; } if (this._view[name]) { return this._view[name]; } if (this.debug() || (this._db && this._db.debug())) { console.log(this.logIdentifier() + ' Creating view ' + name); } this._view[name] = new View(name).db(this); self.deferEmit('create', self._view[name], 'view', name); return this._view[name]; }; /** * Determine if a view with the passed name already exists. * @param {String} name The name of the view to check for. * @returns {boolean} */ Db.prototype.viewExists = function (name) { return Boolean(this._view[name]); }; /** * Returns an array of views the DB currently has. * @returns {Array} An array of objects containing details of each view * the database is currently managing. */ Db.prototype.views = function () { var arr = [], view, i; for (i in this._view) { if (this._view.hasOwnProperty(i)) { view = this._view[i]; arr.push({ name: i, count: view.count(), linked: view.isLinked !== undefined ? view.isLinked() : false }); } } return arr; }; Shared.finishModule('View'); module.exports = View; },{"./ActiveBucket":3,"./Collection":6,"./CollectionGroup":7,"./Overload":32,"./ReactorIO":38,"./Shared":40}],43:[function(_dereq_,module,exports){ (function (process,global){ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : typeof define === 'function' && define.amd ? 
define(['exports'], factory) : (factory((global.async = global.async || {}))); }(this, (function (exports) { 'use strict'; function slice(arrayLike, start) { start = start|0; var newLen = Math.max(arrayLike.length - start, 0); var newArr = Array(newLen); for(var idx = 0; idx < newLen; idx++) { newArr[idx] = arrayLike[start + idx]; } return newArr; } /** * Creates a continuation function with some arguments already applied. * * Useful as a shorthand when combined with other control flow functions. Any * arguments passed to the returned function are added to the arguments * originally passed to apply. * * @name apply * @static * @memberOf module:Utils * @method * @category Util * @param {Function} fn - The function you want to eventually apply all * arguments to. Invokes with (arguments...). * @param {...*} arguments... - Any number of arguments to automatically apply * when the continuation is called. * @returns {Function} the partially-applied function * @example * * // using apply * async.parallel([ * async.apply(fs.writeFile, 'testfile1', 'test1'), * async.apply(fs.writeFile, 'testfile2', 'test2') * ]); * * * // the same process without using apply * async.parallel([ * function(callback) { * fs.writeFile('testfile1', 'test1', callback); * }, * function(callback) { * fs.writeFile('testfile2', 'test2', callback); * } * ]); * * // It's possible to pass any number of additional arguments when calling the * // continuation: * * node> var fn = async.apply(sys.puts, 'one'); * node> fn('two', 'three'); * one * two * three */ var apply = function(fn/*, ...args*/) { var args = slice(arguments, 1); return function(/*callArgs*/) { var callArgs = slice(arguments); return fn.apply(null, args.concat(callArgs)); }; }; var initialParams = function (fn) { return function (/*...args, callback*/) { var args = slice(arguments); var callback = args.pop(); fn.call(this, args, callback); }; }; /** * Checks if `value` is the * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an object, else `false`. * @example * * _.isObject({}); * // => true * * _.isObject([1, 2, 3]); * // => true * * _.isObject(_.noop); * // => true * * _.isObject(null); * // => false */ function isObject(value) { var type = typeof value; return value != null && (type == 'object' || type == 'function'); } var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; function fallback(fn) { setTimeout(fn, 0); } function wrap(defer) { return function (fn/*, ...args*/) { var args = slice(arguments, 1); defer(function () { fn.apply(null, args); }); }; } var _defer; if (hasSetImmediate) { _defer = setImmediate; } else if (hasNextTick) { _defer = process.nextTick; } else { _defer = fallback; } var setImmediate$1 = wrap(_defer); /** * Take a sync function and make it async, passing its return value to a * callback. This is useful for plugging sync functions into a waterfall, * series, or other async functions. Any arguments passed to the generated * function will be passed to the wrapped function (except for the final * callback argument). Errors thrown will be passed to the callback. 
* * If the function passed to `asyncify` returns a Promise, that promises's * resolved/rejected state will be used to call the callback, rather than simply * the synchronous return value. * * This also means you can asyncify ES2017 `async` functions. * * @name asyncify * @static * @memberOf module:Utils * @method * @alias wrapSync * @category Util * @param {Function} func - The synchronous function, or Promise-returning * function to convert to an {@link AsyncFunction}. * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be * invoked with `(args..., callback)`. * @example * * // passing a regular synchronous function * async.waterfall([ * async.apply(fs.readFile, filename, "utf8"), * async.asyncify(JSON.parse), * function (data, next) { * // data is the result of parsing the text. * // If there was a parsing error, it would have been caught. * } * ], callback); * * // passing a function returning a promise * async.waterfall([ * async.apply(fs.readFile, filename, "utf8"), * async.asyncify(function (contents) { * return db.model.create(contents); * }), * function (model, next) { * // `model` is the instantiated model object. * // If there was an error, this function would be skipped. * } * ], callback); * * // es2017 example, though `asyncify` is not needed if your JS environment * // supports async functions out of the box * var q = async.queue(async.asyncify(async function(file) { * var intermediateStep = await processFile(file); * return await somePromise(intermediateStep) * })); * * q.push(files); */ function asyncify(func) { return initialParams(function (args, callback) { var result; try { result = func.apply(this, args); } catch (e) { return callback(e); } // if result is Promise object if (isObject(result) && typeof result.then === 'function') { result.then(function(value) { invokeCallback(callback, null, value); }, function(err) { invokeCallback(callback, err.message ? err : new Error(err)); }); } else { callback(null, result); } }); } function invokeCallback(callback, error, value) { try { callback(error, value); } catch (e) { setImmediate$1(rethrow, e); } } function rethrow(error) { throw error; } var supportsSymbol = typeof Symbol === 'function'; function isAsync(fn) { return supportsSymbol && fn[Symbol.toStringTag] === 'AsyncFunction'; } function wrapAsync(asyncFn) { return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; } function applyEach$1(eachfn) { return function(fns/*, ...args*/) { var args = slice(arguments, 1); var go = initialParams(function(args, callback) { var that = this; return eachfn(fns, function (fn, cb) { wrapAsync(fn).apply(that, args.concat(cb)); }, callback); }); if (args.length) { return go.apply(this, args); } else { return go; } }; } /** Detect free variable `global` from Node.js. */ var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; /** Detect free variable `self`. */ var freeSelf = typeof self == 'object' && self && self.Object === Object && self; /** Used as a reference to the global object. */ var root = freeGlobal || freeSelf || Function('return this')(); /** Built-in value references. */ var Symbol$1 = root.Symbol; /** Used for built-in method references. */ var objectProto = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty = objectProto.hasOwnProperty; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. 
*/ var nativeObjectToString = objectProto.toString; /** Built-in value references. */ var symToStringTag$1 = Symbol$1 ? Symbol$1.toStringTag : undefined; /** * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. * * @private * @param {*} value The value to query. * @returns {string} Returns the raw `toStringTag`. */ function getRawTag(value) { var isOwn = hasOwnProperty.call(value, symToStringTag$1), tag = value[symToStringTag$1]; try { value[symToStringTag$1] = undefined; var unmasked = true; } catch (e) {} var result = nativeObjectToString.call(value); if (unmasked) { if (isOwn) { value[symToStringTag$1] = tag; } else { delete value[symToStringTag$1]; } } return result; } /** Used for built-in method references. */ var objectProto$1 = Object.prototype; /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) * of values. */ var nativeObjectToString$1 = objectProto$1.toString; /** * Converts `value` to a string using `Object.prototype.toString`. * * @private * @param {*} value The value to convert. * @returns {string} Returns the converted string. */ function objectToString(value) { return nativeObjectToString$1.call(value); } /** `Object#toString` result references. */ var nullTag = '[object Null]'; var undefinedTag = '[object Undefined]'; /** Built-in value references. */ var symToStringTag = Symbol$1 ? Symbol$1.toStringTag : undefined; /** * The base implementation of `getTag` without fallbacks for buggy environments. * * @private * @param {*} value The value to query. * @returns {string} Returns the `toStringTag`. */ function baseGetTag(value) { if (value == null) { return value === undefined ? undefinedTag : nullTag; } return (symToStringTag && symToStringTag in Object(value)) ? getRawTag(value) : objectToString(value); } /** `Object#toString` result references. */ var asyncTag = '[object AsyncFunction]'; var funcTag = '[object Function]'; var genTag = '[object GeneratorFunction]'; var proxyTag = '[object Proxy]'; /** * Checks if `value` is classified as a `Function` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a function, else `false`. * @example * * _.isFunction(_); * // => true * * _.isFunction(/abc/); * // => false */ function isFunction(value) { if (!isObject(value)) { return false; } // The use of `Object#toString` avoids issues with the `typeof` operator // in Safari 9 which returns 'object' for typed arrays and other constructors. var tag = baseGetTag(value); return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; } /** Used as references for various `Number` constants. */ var MAX_SAFE_INTEGER = 9007199254740991; /** * Checks if `value` is a valid array-like length. * * **Note:** This method is loosely based on * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. * @example * * _.isLength(3); * // => true * * _.isLength(Number.MIN_VALUE); * // => false * * _.isLength(Infinity); * // => false * * _.isLength('3'); * // => false */ function isLength(value) { return typeof value == 'number' && value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; } /** * Checks if `value` is array-like. 
A value is considered array-like if it's * not a function and has a `value.length` that's an integer greater than or * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is array-like, else `false`. * @example * * _.isArrayLike([1, 2, 3]); * // => true * * _.isArrayLike(document.body.children); * // => true * * _.isArrayLike('abc'); * // => true * * _.isArrayLike(_.noop); * // => false */ function isArrayLike(value) { return value != null && isLength(value.length) && !isFunction(value); } // A temporary value used to identify if the loop should be broken. // See #1064, #1293 var breakLoop = {}; /** * This method returns `undefined`. * * @static * @memberOf _ * @since 2.3.0 * @category Util * @example * * _.times(2, _.noop); * // => [undefined, undefined] */ function noop() { // No operation performed. } function once(fn) { return function () { if (fn === null) return; var callFn = fn; fn = null; callFn.apply(this, arguments); }; } var iteratorSymbol = typeof Symbol === 'function' && Symbol.iterator; var getIterator = function (coll) { return iteratorSymbol && coll[iteratorSymbol] && coll[iteratorSymbol](); }; /** * The base implementation of `_.times` without support for iteratee shorthands * or max array length checks. * * @private * @param {number} n The number of times to invoke `iteratee`. * @param {Function} iteratee The function invoked per iteration. * @returns {Array} Returns the array of results. */ function baseTimes(n, iteratee) { var index = -1, result = Array(n); while (++index < n) { result[index] = iteratee(index); } return result; } /** * Checks if `value` is object-like. A value is object-like if it's not `null` * and has a `typeof` result of "object". * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is object-like, else `false`. * @example * * _.isObjectLike({}); * // => true * * _.isObjectLike([1, 2, 3]); * // => true * * _.isObjectLike(_.noop); * // => false * * _.isObjectLike(null); * // => false */ function isObjectLike(value) { return value != null && typeof value == 'object'; } /** `Object#toString` result references. */ var argsTag = '[object Arguments]'; /** * The base implementation of `_.isArguments`. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an `arguments` object, */ function baseIsArguments(value) { return isObjectLike(value) && baseGetTag(value) == argsTag; } /** Used for built-in method references. */ var objectProto$3 = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty$2 = objectProto$3.hasOwnProperty; /** Built-in value references. */ var propertyIsEnumerable = objectProto$3.propertyIsEnumerable; /** * Checks if `value` is likely an `arguments` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an `arguments` object, * else `false`. * @example * * _.isArguments(function() { return arguments; }()); * // => true * * _.isArguments([1, 2, 3]); * // => false */ var isArguments = baseIsArguments(function() { return arguments; }()) ? 
baseIsArguments : function(value) { return isObjectLike(value) && hasOwnProperty$2.call(value, 'callee') && !propertyIsEnumerable.call(value, 'callee'); }; /** * Checks if `value` is classified as an `Array` object. * * @static * @memberOf _ * @since 0.1.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is an array, else `false`. * @example * * _.isArray([1, 2, 3]); * // => true * * _.isArray(document.body.children); * // => false * * _.isArray('abc'); * // => false * * _.isArray(_.noop); * // => false */ var isArray = Array.isArray; /** * This method returns `false`. * * @static * @memberOf _ * @since 4.13.0 * @category Util * @returns {boolean} Returns `false`. * @example * * _.times(2, _.stubFalse); * // => [false, false] */ function stubFalse() { return false; } /** Detect free variable `exports`. */ var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; /** Detect free variable `module`. */ var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; /** Detect the popular CommonJS extension `module.exports`. */ var moduleExports = freeModule && freeModule.exports === freeExports; /** Built-in value references. */ var Buffer = moduleExports ? root.Buffer : undefined; /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; /** * Checks if `value` is a buffer. * * @static * @memberOf _ * @since 4.3.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. * @example * * _.isBuffer(new Buffer(2)); * // => true * * _.isBuffer(new Uint8Array(2)); * // => false */ var isBuffer = nativeIsBuffer || stubFalse; /** Used as references for various `Number` constants. */ var MAX_SAFE_INTEGER$1 = 9007199254740991; /** Used to detect unsigned integer values. */ var reIsUint = /^(?:0|[1-9]\d*)$/; /** * Checks if `value` is a valid array-like index. * * @private * @param {*} value The value to check. * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. */ function isIndex(value, length) { length = length == null ? MAX_SAFE_INTEGER$1 : length; return !!length && (typeof value == 'number' || reIsUint.test(value)) && (value > -1 && value % 1 == 0 && value < length); } /** `Object#toString` result references. */ var argsTag$1 = '[object Arguments]'; var arrayTag = '[object Array]'; var boolTag = '[object Boolean]'; var dateTag = '[object Date]'; var errorTag = '[object Error]'; var funcTag$1 = '[object Function]'; var mapTag = '[object Map]'; var numberTag = '[object Number]'; var objectTag = '[object Object]'; var regexpTag = '[object RegExp]'; var setTag = '[object Set]'; var stringTag = '[object String]'; var weakMapTag = '[object WeakMap]'; var arrayBufferTag = '[object ArrayBuffer]'; var dataViewTag = '[object DataView]'; var float32Tag = '[object Float32Array]'; var float64Tag = '[object Float64Array]'; var int8Tag = '[object Int8Array]'; var int16Tag = '[object Int16Array]'; var int32Tag = '[object Int32Array]'; var uint8Tag = '[object Uint8Array]'; var uint8ClampedTag = '[object Uint8ClampedArray]'; var uint16Tag = '[object Uint16Array]'; var uint32Tag = '[object Uint32Array]'; /** Used to identify `toStringTag` values of typed arrays. 
*/ var typedArrayTags = {}; typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = typedArrayTags[uint32Tag] = true; typedArrayTags[argsTag$1] = typedArrayTags[arrayTag] = typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = typedArrayTags[errorTag] = typedArrayTags[funcTag$1] = typedArrayTags[mapTag] = typedArrayTags[numberTag] = typedArrayTags[objectTag] = typedArrayTags[regexpTag] = typedArrayTags[setTag] = typedArrayTags[stringTag] = typedArrayTags[weakMapTag] = false; /** * The base implementation of `_.isTypedArray` without Node.js optimizations. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. */ function baseIsTypedArray(value) { return isObjectLike(value) && isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; } /** * The base implementation of `_.unary` without support for storing metadata. * * @private * @param {Function} func The function to cap arguments for. * @returns {Function} Returns the new capped function. */ function baseUnary(func) { return function(value) { return func(value); }; } /** Detect free variable `exports`. */ var freeExports$1 = typeof exports == 'object' && exports && !exports.nodeType && exports; /** Detect free variable `module`. */ var freeModule$1 = freeExports$1 && typeof module == 'object' && module && !module.nodeType && module; /** Detect the popular CommonJS extension `module.exports`. */ var moduleExports$1 = freeModule$1 && freeModule$1.exports === freeExports$1; /** Detect free variable `process` from Node.js. */ var freeProcess = moduleExports$1 && freeGlobal.process; /** Used to access faster Node.js helpers. */ var nodeUtil = (function() { try { return freeProcess && freeProcess.binding && freeProcess.binding('util'); } catch (e) {} }()); /* Node.js helper references. */ var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; /** * Checks if `value` is classified as a typed array. * * @static * @memberOf _ * @since 3.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. * @example * * _.isTypedArray(new Uint8Array); * // => true * * _.isTypedArray([]); * // => false */ var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; /** Used for built-in method references. */ var objectProto$2 = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty$1 = objectProto$2.hasOwnProperty; /** * Creates an array of the enumerable property names of the array-like `value`. * * @private * @param {*} value The value to query. * @param {boolean} inherited Specify returning inherited property names. * @returns {Array} Returns the array of property names. */ function arrayLikeKeys(value, inherited) { var isArr = isArray(value), isArg = !isArr && isArguments(value), isBuff = !isArr && !isArg && isBuffer(value), isType = !isArr && !isArg && !isBuff && isTypedArray(value), skipIndexes = isArr || isArg || isBuff || isType, result = skipIndexes ? baseTimes(value.length, String) : [], length = result.length; for (var key in value) { if ((inherited || hasOwnProperty$1.call(value, key)) && !(skipIndexes && ( // Safari 9 has enumerable `arguments.length` in strict mode. 
key == 'length' || // Node.js 0.10 has enumerable non-index properties on buffers. (isBuff && (key == 'offset' || key == 'parent')) || // PhantomJS 2 has enumerable non-index properties on typed arrays. (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || // Skip index properties. isIndex(key, length) ))) { result.push(key); } } return result; } /** Used for built-in method references. */ var objectProto$5 = Object.prototype; /** * Checks if `value` is likely a prototype object. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. */ function isPrototype(value) { var Ctor = value && value.constructor, proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto$5; return value === proto; } /** * Creates a unary function that invokes `func` with its argument transformed. * * @private * @param {Function} func The function to wrap. * @param {Function} transform The argument transform. * @returns {Function} Returns the new function. */ function overArg(func, transform) { return function(arg) { return func(transform(arg)); }; } /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeKeys = overArg(Object.keys, Object); /** Used for built-in method references. */ var objectProto$4 = Object.prototype; /** Used to check objects for own properties. */ var hasOwnProperty$3 = objectProto$4.hasOwnProperty; /** * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. * * @private * @param {Object} object The object to query. * @returns {Array} Returns the array of property names. */ function baseKeys(object) { if (!isPrototype(object)) { return nativeKeys(object); } var result = []; for (var key in Object(object)) { if (hasOwnProperty$3.call(object, key) && key != 'constructor') { result.push(key); } } return result; } /** * Creates an array of the own enumerable property names of `object`. * * **Note:** Non-object values are coerced to objects. See the * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) * for more details. * * @static * @since 0.1.0 * @memberOf _ * @category Object * @param {Object} object The object to query. * @returns {Array} Returns the array of property names. * @example * * function Foo() { * this.a = 1; * this.b = 2; * } * * Foo.prototype.c = 3; * * _.keys(new Foo); * // => ['a', 'b'] (iteration order is not guaranteed) * * _.keys('hi'); * // => ['0', '1'] */ function keys(object) { return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); } function createArrayIterator(coll) { var i = -1; var len = coll.length; return function next() { return ++i < len ? {value: coll[i], key: i} : null; } } function createES2015Iterator(iterator) { var i = -1; return function next() { var item = iterator.next(); if (item.done) return null; i++; return {value: item.value, key: i}; } } function createObjectIterator(obj) { var okeys = keys(obj); var i = -1; var len = okeys.length; return function next() { var key = okeys[++i]; return i < len ? {value: obj[key], key: key} : null; }; } function iterator(coll) { if (isArrayLike(coll)) { return createArrayIterator(coll); } var iterator = getIterator(coll); return iterator ? 
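/* a native Symbol.iterator, when the collection exposes one, takes precedence; plain objects fall back to key-based iteration */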
createES2015Iterator(iterator) : createObjectIterator(coll); } function onlyOnce(fn) { return function() { if (fn === null) throw new Error("Callback was already called."); var callFn = fn; fn = null; callFn.apply(this, arguments); }; } function _eachOfLimit(limit) { return function (obj, iteratee, callback) { callback = once(callback || noop); if (limit <= 0 || !obj) { return callback(null); } var nextElem = iterator(obj); var done = false; var running = 0; function iterateeCallback(err, value) { running -= 1; if (err) { done = true; callback(err); } else if (value === breakLoop || (done && running <= 0)) { done = true; return callback(null); } else { replenish(); } } function replenish () { while (running < limit && !done) { var elem = nextElem(); if (elem === null) { done = true; if (running <= 0) { callback(null); } return; } running += 1; iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); } } replenish(); }; } /** * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a * time. * * @name eachOfLimit * @static * @memberOf module:Collections * @method * @see [async.eachOf]{@link module:Collections.eachOf} * @alias forEachOfLimit * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async function to apply to each * item in `coll`. The `key` is the item's key, or index in the case of an * array. * Invoked with (item, key, callback). * @param {Function} [callback] - A callback which is called when all * `iteratee` functions have finished, or an error occurs. Invoked with (err). */ function eachOfLimit(coll, limit, iteratee, callback) { _eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); } function doLimit(fn, limit) { return function (iterable, iteratee, callback) { return fn(iterable, limit, iteratee, callback); }; } // eachOf implementation optimized for array-likes function eachOfArrayLike(coll, iteratee, callback) { callback = once(callback || noop); var index = 0, completed = 0, length = coll.length; if (length === 0) { callback(null); } function iteratorCallback(err, value) { if (err) { callback(err); } else if ((++completed === length) || value === breakLoop) { callback(null); } } for (; index < length; index++) { iteratee(coll[index], index, onlyOnce(iteratorCallback)); } } // a generic version of eachOf which can handle array, object, and iterator cases. var eachOfGeneric = doLimit(eachOfLimit, Infinity); /** * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument * to the iteratee. * * @name eachOf * @static * @memberOf module:Collections * @method * @alias forEachOf * @category Collection * @see [async.each]{@link module:Collections.each} * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - A function to apply to each * item in `coll`. * The `key` is the item's key, or index in the case of an array. * Invoked with (item, key, callback). * @param {Function} [callback] - A callback which is called when all * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
* @example * * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; * var configs = {}; * * async.forEachOf(obj, function (value, key, callback) { * fs.readFile(__dirname + value, "utf8", function (err, data) { * if (err) return callback(err); * try { * configs[key] = JSON.parse(data); * } catch (e) { * return callback(e); * } * callback(); * }); * }, function (err) { * if (err) console.error(err.message); * // configs is now a map of JSON data * doSomethingWith(configs); * }); */ var eachOf = function(coll, iteratee, callback) { var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; eachOfImplementation(coll, wrapAsync(iteratee), callback); }; function doParallel(fn) { return function (obj, iteratee, callback) { return fn(eachOf, obj, wrapAsync(iteratee), callback); }; } function _asyncMap(eachfn, arr, iteratee, callback) { callback = callback || noop; arr = arr || []; var results = []; var counter = 0; var _iteratee = wrapAsync(iteratee); eachfn(arr, function (value, _, callback) { var index = counter++; _iteratee(value, function (err, v) { results[index] = v; callback(err); }); }, function (err) { callback(err, results); }); } /** * Produces a new collection of values by mapping each value in `coll` through * the `iteratee` function. The `iteratee` is called with an item from `coll` * and a callback for when it has finished processing. Each of these callback * takes 2 arguments: an `error`, and the transformed item from `coll`. If * `iteratee` passes an error to its callback, the main `callback` (for the * `map` function) is immediately called with the error. * * Note, that since this function applies the `iteratee` to each item in * parallel, there is no guarantee that the `iteratee` functions will complete * in order. However, the results array will be in the same order as the * original `coll`. * * If `map` is passed an Object, the results will be an Array. The results * will roughly be in the order of the original Objects' keys (but this can * vary across JavaScript engines). * * @name map * @static * @memberOf module:Collections * @method * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with the transformed item. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Results is an Array of the * transformed items from the `coll`. Invoked with (err, results). * @example * * async.map(['file1','file2','file3'], fs.stat, function(err, results) { * // results is now an array of stats for each file * }); */ var map = doParallel(_asyncMap); /** * Applies the provided arguments to each function in the array, calling * `callback` after all functions have completed. If you only provide the first * argument, `fns`, then it will return a function which lets you pass in the * arguments as if it were a single function call. If more arguments are * provided, `callback` is required while `args` is still optional. * * @name applyEach * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s * to all call with the same arguments * @param {...*} [args] - any number of separate arguments to pass to the * function. 
* @param {Function} [callback] - the final argument should be the callback, * called when all functions have completed processing. * @returns {Function} - If only the first argument, `fns`, is provided, it will * return a function which lets you pass in the arguments as if it were a single * function call. The signature is `(..args, callback)`. If invoked with any * arguments, `callback` is required. * @example * * async.applyEach([enableSearch, updateSchema], 'bucket', callback); * * // partial application example: * async.each( * buckets, * async.applyEach([enableSearch, updateSchema]), * callback * ); */ var applyEach = applyEach$1(map); function doParallelLimit(fn) { return function (obj, limit, iteratee, callback) { return fn(_eachOfLimit(limit), obj, wrapAsync(iteratee), callback); }; } /** * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. * * @name mapLimit * @static * @memberOf module:Collections * @method * @see [async.map]{@link module:Collections.map} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with the transformed item. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Results is an array of the * transformed items from the `coll`. Invoked with (err, results). */ var mapLimit = doParallelLimit(_asyncMap); /** * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. * * @name mapSeries * @static * @memberOf module:Collections * @method * @see [async.map]{@link module:Collections.map} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with the transformed item. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Results is an array of the * transformed items from the `coll`. Invoked with (err, results). */ var mapSeries = doLimit(mapLimit, 1); /** * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. * * @name applyEachSeries * @static * @memberOf module:ControlFlow * @method * @see [async.applyEach]{@link module:ControlFlow.applyEach} * @category Control Flow * @param {Array|Iterable|Object} fns - A collection of {@link AsyncFunction}s to all * call with the same arguments * @param {...*} [args] - any number of separate arguments to pass to the * function. * @param {Function} [callback] - the final argument should be the callback, * called when all functions have completed processing. * @returns {Function} - If only the first argument is provided, it will return * a function which lets you pass in the arguments as if it were a single * function call. */ var applyEachSeries = applyEach$1(mapSeries); /** * A specialized version of `_.forEach` for arrays without support for * iteratee shorthands. * * @private * @param {Array} [array] The array to iterate over. * @param {Function} iteratee The function invoked per iteration. * @returns {Array} Returns `array`. 
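 *
 * A minimal usage sketch (illustrative only; this helper is private to the
 * bundle):
 *
 *   var seen = [];
 *   arrayEach([1, 2, 3, 4], function (n) {
 *     seen.push(n);
 *     return n !== 2; // returning `false` stops the iteration early
 *   });
 *   // seen => [1, 2]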
*/ function arrayEach(array, iteratee) { var index = -1, length = array == null ? 0 : array.length; while (++index < length) { if (iteratee(array[index], index, array) === false) { break; } } return array; } /** * Creates a base function for methods like `_.forIn` and `_.forOwn`. * * @private * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {Function} Returns the new base function. */ function createBaseFor(fromRight) { return function(object, iteratee, keysFunc) { var index = -1, iterable = Object(object), props = keysFunc(object), length = props.length; while (length--) { var key = props[fromRight ? length : ++index]; if (iteratee(iterable[key], key, iterable) === false) { break; } } return object; }; } /** * The base implementation of `baseForOwn` which iterates over `object` * properties returned by `keysFunc` and invokes `iteratee` for each property. * Iteratee functions may exit iteration early by explicitly returning `false`. * * @private * @param {Object} object The object to iterate over. * @param {Function} iteratee The function invoked per iteration. * @param {Function} keysFunc The function to get the keys of `object`. * @returns {Object} Returns `object`. */ var baseFor = createBaseFor(); /** * The base implementation of `_.forOwn` without support for iteratee shorthands. * * @private * @param {Object} object The object to iterate over. * @param {Function} iteratee The function invoked per iteration. * @returns {Object} Returns `object`. */ function baseForOwn(object, iteratee) { return object && baseFor(object, iteratee, keys); } /** * The base implementation of `_.findIndex` and `_.findLastIndex` without * support for iteratee shorthands. * * @private * @param {Array} array The array to inspect. * @param {Function} predicate The function invoked per iteration. * @param {number} fromIndex The index to search from. * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {number} Returns the index of the matched value, else `-1`. */ function baseFindIndex(array, predicate, fromIndex, fromRight) { var length = array.length, index = fromIndex + (fromRight ? 1 : -1); while ((fromRight ? index-- : ++index < length)) { if (predicate(array[index], index, array)) { return index; } } return -1; } /** * The base implementation of `_.isNaN` without support for number objects. * * @private * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. */ function baseIsNaN(value) { return value !== value; } /** * A specialized version of `_.indexOf` which performs strict equality * comparisons of values, i.e. `===`. * * @private * @param {Array} array The array to inspect. * @param {*} value The value to search for. * @param {number} fromIndex The index to search from. * @returns {number} Returns the index of the matched value, else `-1`. */ function strictIndexOf(array, value, fromIndex) { var index = fromIndex - 1, length = array.length; while (++index < length) { if (array[index] === value) { return index; } } return -1; } /** * The base implementation of `_.indexOf` without `fromIndex` bounds checks. * * @private * @param {Array} array The array to inspect. * @param {*} value The value to search for. * @param {number} fromIndex The index to search from. * @returns {number} Returns the index of the matched value, else `-1`. */ function baseIndexOf(array, value, fromIndex) { return value === value ? 
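/* `value === value` is false only for NaN, which needs the baseIsNaN scan */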
strictIndexOf(array, value, fromIndex) : baseFindIndex(array, baseIsNaN, fromIndex); } /** * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on * their requirements. Each function can optionally depend on other functions * being completed first, and each function is run as soon as its requirements * are satisfied. * * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence * will stop. Further tasks will not execute (so any other functions depending * on it will not run), and the main `callback` is immediately called with the * error. * * {@link AsyncFunction}s also receive an object containing the results of functions which * have completed so far as the first argument, if they have dependencies. If a * task function has no dependencies, it will only be passed a callback. * * @name auto * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Object} tasks - An object. Each of its properties is either a * function or an array of requirements, with the {@link AsyncFunction} itself the last item * in the array. The object's key of a property serves as the name of the task * defined by that property, i.e. can be used when specifying requirements for * other tasks. The function receives one or two arguments: * * a `results` object, containing the results of the previously executed * functions, only passed if the task has any dependencies, * * a `callback(err, result)` function, which must be called when finished, * passing an `error` (which can be `null`) and the result of the function's * execution. * @param {number} [concurrency=Infinity] - An optional `integer` for * determining the maximum number of tasks that can be run in parallel. By * default, as many as possible. * @param {Function} [callback] - An optional callback which is called when all * the tasks have been completed. It receives the `err` argument if any `tasks` * pass an error to their callback. Results are always returned; however, if an * error occurs, no further `tasks` will be performed, and the results object * will only contain partial results. Invoked with (err, results). * @returns undefined * @example * * async.auto({ * // this function will just be passed a callback * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), * showData: ['readData', function(results, cb) { * // results.readData is the file's contents * // ... * }] * }, callback); * * async.auto({ * get_data: function(callback) { * console.log('in get_data'); * // async code to get some data * callback(null, 'data', 'converted to array'); * }, * make_folder: function(callback) { * console.log('in make_folder'); * // async code to create a directory to store a file in * // this is run at the same time as getting the data * callback(null, 'folder'); * }, * write_file: ['get_data', 'make_folder', function(results, callback) { * console.log('in write_file', JSON.stringify(results)); * // once there is some data and the directory exists, * // write the data to a file in the directory * callback(null, 'filename'); * }], * email_link: ['write_file', function(results, callback) { * console.log('in email_link', JSON.stringify(results)); * // once the file is written let's email a link to it... * // results.write_file contains the filename returned by write_file. 
* callback(null, {'file':results.write_file, 'email':'user@example.com'}); * }] * }, function(err, results) { * console.log('err = ', err); * console.log('results = ', results); * }); */ var auto = function (tasks, concurrency, callback) { if (typeof concurrency === 'function') { // concurrency is optional, shift the args. callback = concurrency; concurrency = null; } callback = once(callback || noop); var keys$$1 = keys(tasks); var numTasks = keys$$1.length; if (!numTasks) { return callback(null); } if (!concurrency) { concurrency = numTasks; } var results = {}; var runningTasks = 0; var hasError = false; var listeners = Object.create(null); var readyTasks = []; // for cycle detection: var readyToCheck = []; // tasks that have been identified as reachable // without the possibility of returning to an ancestor task var uncheckedDependencies = {}; baseForOwn(tasks, function (task, key) { if (!isArray(task)) { // no dependencies enqueueTask(key, [task]); readyToCheck.push(key); return; } var dependencies = task.slice(0, task.length - 1); var remainingDependencies = dependencies.length; if (remainingDependencies === 0) { enqueueTask(key, task); readyToCheck.push(key); return; } uncheckedDependencies[key] = remainingDependencies; arrayEach(dependencies, function (dependencyName) { if (!tasks[dependencyName]) { throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', ')); } addListener(dependencyName, function () { remainingDependencies--; if (remainingDependencies === 0) { enqueueTask(key, task); } }); }); }); checkForDeadlocks(); processQueue(); function enqueueTask(key, task) { readyTasks.push(function () { runTask(key, task); }); } function processQueue() { if (readyTasks.length === 0 && runningTasks === 0) { return callback(null, results); } while(readyTasks.length && runningTasks < concurrency) { var run = readyTasks.shift(); run(); } } function addListener(taskName, fn) { var taskListeners = listeners[taskName]; if (!taskListeners) { taskListeners = listeners[taskName] = []; } taskListeners.push(fn); } function taskComplete(taskName) { var taskListeners = listeners[taskName] || []; arrayEach(taskListeners, function (fn) { fn(); }); processQueue(); } function runTask(key, task) { if (hasError) return; var taskCallback = onlyOnce(function(err, result) { runningTasks--; if (arguments.length > 2) { result = slice(arguments, 1); } if (err) { var safeResults = {}; baseForOwn(results, function(val, rkey) { safeResults[rkey] = val; }); safeResults[key] = result; hasError = true; listeners = Object.create(null); callback(err, safeResults); } else { results[key] = result; taskComplete(key); } }); runningTasks++; var taskFn = wrapAsync(task[task.length - 1]); if (task.length > 1) { taskFn(results, taskCallback); } else { taskFn(taskCallback); } } function checkForDeadlocks() { // Kahn's algorithm // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html var currentTask; var counter = 0; while (readyToCheck.length) { currentTask = readyToCheck.pop(); counter++; arrayEach(getDependents(currentTask), function (dependent) { if (--uncheckedDependencies[dependent] === 0) { readyToCheck.push(dependent); } }); } if (counter !== numTasks) { throw new Error( 'async.auto cannot execute tasks due to a recursive dependency' ); } } function getDependents(taskName) { var result = []; baseForOwn(tasks, function (task, key) { if (isArray(task) && 
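/* a task is a dependent of `taskName` when its dependency array lists it */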
baseIndexOf(task, taskName, 0) >= 0) { result.push(key); } }); return result; } }; /** * A specialized version of `_.map` for arrays without support for iteratee * shorthands. * * @private * @param {Array} [array] The array to iterate over. * @param {Function} iteratee The function invoked per iteration. * @returns {Array} Returns the new mapped array. */ function arrayMap(array, iteratee) { var index = -1, length = array == null ? 0 : array.length, result = Array(length); while (++index < length) { result[index] = iteratee(array[index], index, array); } return result; } /** `Object#toString` result references. */ var symbolTag = '[object Symbol]'; /** * Checks if `value` is classified as a `Symbol` primitive or object. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to check. * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. * @example * * _.isSymbol(Symbol.iterator); * // => true * * _.isSymbol('abc'); * // => false */ function isSymbol(value) { return typeof value == 'symbol' || (isObjectLike(value) && baseGetTag(value) == symbolTag); } /** Used as references for various `Number` constants. */ var INFINITY = 1 / 0; /** Used to convert symbols to primitives and strings. */ var symbolProto = Symbol$1 ? Symbol$1.prototype : undefined; var symbolToString = symbolProto ? symbolProto.toString : undefined; /** * The base implementation of `_.toString` which doesn't convert nullish * values to empty strings. * * @private * @param {*} value The value to process. * @returns {string} Returns the string. */ function baseToString(value) { // Exit early for strings to avoid a performance hit in some environments. if (typeof value == 'string') { return value; } if (isArray(value)) { // Recursively convert values (susceptible to call stack limits). return arrayMap(value, baseToString) + ''; } if (isSymbol(value)) { return symbolToString ? symbolToString.call(value) : ''; } var result = (value + ''); return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; } /** * The base implementation of `_.slice` without an iteratee call guard. * * @private * @param {Array} array The array to slice. * @param {number} [start=0] The start position. * @param {number} [end=array.length] The end position. * @returns {Array} Returns the slice of `array`. */ function baseSlice(array, start, end) { var index = -1, length = array.length; if (start < 0) { start = -start > length ? 0 : (length + start); } end = end > length ? length : end; if (end < 0) { end += length; } length = start > end ? 0 : ((end - start) >>> 0); start >>>= 0; var result = Array(length); while (++index < length) { result[index] = array[index + start]; } return result; } /** * Casts `array` to a slice if it's needed. * * @private * @param {Array} array The array to inspect. * @param {number} start The start position. * @param {number} [end=array.length] The end position. * @returns {Array} Returns the cast slice. */ function castSlice(array, start, end) { var length = array.length; end = end === undefined ? length : end; return (!start && end >= length) ? array : baseSlice(array, start, end); } /** * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol * that is not found in the character symbols. * * @private * @param {Array} strSymbols The string symbols to inspect. * @param {Array} chrSymbols The character symbols to find. * @returns {number} Returns the index of the last unmatched string symbol. 
*/ function charsEndIndex(strSymbols, chrSymbols) { var index = strSymbols.length; while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} return index; } /** * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol * that is not found in the character symbols. * * @private * @param {Array} strSymbols The string symbols to inspect. * @param {Array} chrSymbols The character symbols to find. * @returns {number} Returns the index of the first unmatched string symbol. */ function charsStartIndex(strSymbols, chrSymbols) { var index = -1, length = strSymbols.length; while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} return index; } /** * Converts an ASCII `string` to an array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the converted array. */ function asciiToArray(string) { return string.split(''); } /** Used to compose unicode character classes. */ var rsAstralRange = '\\ud800-\\udfff'; var rsComboMarksRange = '\\u0300-\\u036f'; var reComboHalfMarksRange = '\\ufe20-\\ufe2f'; var rsComboSymbolsRange = '\\u20d0-\\u20ff'; var rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange; var rsVarRange = '\\ufe0e\\ufe0f'; /** Used to compose unicode capture groups. */ var rsZWJ = '\\u200d'; /** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); /** * Checks if `string` contains Unicode symbols. * * @private * @param {string} string The string to inspect. * @returns {boolean} Returns `true` if a symbol is found, else `false`. */ function hasUnicode(string) { return reHasUnicode.test(string); } /** Used to compose unicode character classes. */ var rsAstralRange$1 = '\\ud800-\\udfff'; var rsComboMarksRange$1 = '\\u0300-\\u036f'; var reComboHalfMarksRange$1 = '\\ufe20-\\ufe2f'; var rsComboSymbolsRange$1 = '\\u20d0-\\u20ff'; var rsComboRange$1 = rsComboMarksRange$1 + reComboHalfMarksRange$1 + rsComboSymbolsRange$1; var rsVarRange$1 = '\\ufe0e\\ufe0f'; /** Used to compose unicode capture groups. */ var rsAstral = '[' + rsAstralRange$1 + ']'; var rsCombo = '[' + rsComboRange$1 + ']'; var rsFitz = '\\ud83c[\\udffb-\\udfff]'; var rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')'; var rsNonAstral = '[^' + rsAstralRange$1 + ']'; var rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}'; var rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]'; var rsZWJ$1 = '\\u200d'; /** Used to compose unicode regexes. */ var reOptMod = rsModifier + '?'; var rsOptVar = '[' + rsVarRange$1 + ']?'; var rsOptJoin = '(?:' + rsZWJ$1 + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*'; var rsSeq = rsOptVar + reOptMod + rsOptJoin; var rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; /** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); /** * Converts a Unicode `string` to an array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the converted array. */ function unicodeToArray(string) { return string.match(reUnicode) || []; } /** * Converts `string` to an array. * * @private * @param {string} string The string to convert. * @returns {Array} Returns the converted array. 
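 *
 * A small sketch of the expected behaviour (illustrative only; this helper is
 * private to the bundle):
 *
 *   stringToArray('abc');
 *   // => ['a', 'b', 'c']             (ASCII fast path)
 *   stringToArray('a\ud83d\ude00b');
 *   // => ['a', '\ud83d\ude00', 'b']  (the surrogate pair stays together)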
*/ function stringToArray(string) { return hasUnicode(string) ? unicodeToArray(string) : asciiToArray(string); } /** * Converts `value` to a string. An empty string is returned for `null` * and `undefined` values. The sign of `-0` is preserved. * * @static * @memberOf _ * @since 4.0.0 * @category Lang * @param {*} value The value to convert. * @returns {string} Returns the converted string. * @example * * _.toString(null); * // => '' * * _.toString(-0); * // => '-0' * * _.toString([1, 2, 3]); * // => '1,2,3' */ function toString(value) { return value == null ? '' : baseToString(value); } /** Used to match leading and trailing whitespace. */ var reTrim = /^\s+|\s+$/g; /** * Removes leading and trailing whitespace or specified characters from `string`. * * @static * @memberOf _ * @since 3.0.0 * @category String * @param {string} [string=''] The string to trim. * @param {string} [chars=whitespace] The characters to trim. * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. * @returns {string} Returns the trimmed string. * @example * * _.trim(' abc '); * // => 'abc' * * _.trim('-_-abc-_-', '_-'); * // => 'abc' * * _.map([' foo ', ' bar '], _.trim); * // => ['foo', 'bar'] */ function trim(string, chars, guard) { string = toString(string); if (string && (guard || chars === undefined)) { return string.replace(reTrim, ''); } if (!string || !(chars = baseToString(chars))) { return string; } var strSymbols = stringToArray(string), chrSymbols = stringToArray(chars), start = charsStartIndex(strSymbols, chrSymbols), end = charsEndIndex(strSymbols, chrSymbols) + 1; return castSlice(strSymbols, start, end).join(''); } var FN_ARGS = /^(?:async\s+)?(function)?\s*[^\(]*\(\s*([^\)]*)\)/m; var FN_ARG_SPLIT = /,/; var FN_ARG = /(=.+)?(\s*)$/; var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; function parseParams(func) { func = func.toString().replace(STRIP_COMMENTS, ''); func = func.match(FN_ARGS)[2].replace(' ', ''); func = func ? func.split(FN_ARG_SPLIT) : []; func = func.map(function (arg){ return trim(arg.replace(FN_ARG, '')); }); return func; } /** * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent * tasks are specified as parameters to the function, after the usual callback * parameter, with the parameter names matching the names of the tasks it * depends on. This can provide even more readable task graphs which can be * easier to maintain. * * If a final callback is specified, the task results are similarly injected, * specified as named parameters after the initial error parameter. * * The autoInject function is purely syntactic sugar and its semantics are * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. * * @name autoInject * @static * @memberOf module:ControlFlow * @method * @see [async.auto]{@link module:ControlFlow.auto} * @category Control Flow * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of * the form 'func([dependencies...], callback). The object's key of a property * serves as the name of the task defined by that property, i.e. can be used * when specifying requirements for other tasks. * * The `callback` parameter is a `callback(err, result)` which must be called * when finished, passing an `error` (which can be `null`) and the result of * the function's execution. The remaining parameters name other tasks on * which the task is dependent, and the results from those tasks are the * arguments of those parameters. 
* @param {Function} [callback] - An optional callback which is called when all * the tasks have been completed. It receives the `err` argument if any `tasks` * pass an error to their callback, and a `results` object with any completed * task results, similar to `auto`. * @example * * // The example from `auto` can be rewritten as follows: * async.autoInject({ * get_data: function(callback) { * // async code to get some data * callback(null, 'data', 'converted to array'); * }, * make_folder: function(callback) { * // async code to create a directory to store a file in * // this is run at the same time as getting the data * callback(null, 'folder'); * }, * write_file: function(get_data, make_folder, callback) { * // once there is some data and the directory exists, * // write the data to a file in the directory * callback(null, 'filename'); * }, * email_link: function(write_file, callback) { * // once the file is written let's email a link to it... * // write_file contains the filename returned by write_file. * callback(null, {'file':write_file, 'email':'user@example.com'}); * } * }, function(err, results) { * console.log('err = ', err); * console.log('email_link = ', results.email_link); * }); * * // If you are using a JS minifier that mangles parameter names, `autoInject` * // will not work with plain functions, since the parameter names will be * // collapsed to a single letter identifier. To work around this, you can * // explicitly specify the names of the parameters your task function needs * // in an array, similar to Angular.js dependency injection. * * // This still has an advantage over plain `auto`, since the results a task * // depends on are still spread into arguments. * async.autoInject({ * //... * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { * callback(null, 'filename'); * }], * email_link: ['write_file', function(write_file, callback) { * callback(null, {'file':write_file, 'email':'user@example.com'}); * }] * //... * }, function(err, results) { * console.log('err = ', err); * console.log('email_link = ', results.email_link); * }); */ function autoInject(tasks, callback) { var newTasks = {}; baseForOwn(tasks, function (taskFn, key) { var params; var fnIsAsync = isAsync(taskFn); var hasNoDeps = (!fnIsAsync && taskFn.length === 1) || (fnIsAsync && taskFn.length === 0); if (isArray(taskFn)) { params = taskFn.slice(0, -1); taskFn = taskFn[taskFn.length - 1]; newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); } else if (hasNoDeps) { // no dependencies, use the function as-is newTasks[key] = taskFn; } else { params = parseParams(taskFn); if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { throw new Error("autoInject task functions require explicit parameters."); } // remove callback param if (!fnIsAsync) params.pop(); newTasks[key] = params.concat(newTask); } function newTask(results, taskCb) { var newArgs = arrayMap(params, function (name) { return results[name]; }); newArgs.push(taskCb); wrapAsync(taskFn).apply(null, newArgs); } }); auto(newTasks, callback); } // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation // used for queues. This implementation assumes that the node provided by the user can be modified // to adjust the next and last properties. We implement only the minimal functionality // for queue support. 
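// A minimal sketch of how the queue below drives this list (DLL is internal
// to this bundle, so the snippet is illustrative only). New task nodes are
// pushed onto the tail, priority items are unshifted onto the head, and
// workers shift nodes off the head:
//
//   var list = new DLL();
//   list.push({data: 'a'});          // tail insert
//   list.push({data: 'b'});
//   list.unshift({data: 'urgent'});  // head insert (used by queue.unshift)
//   list.shift().data;               // => 'urgent'
//   list.toArray();                  // => ['a', 'b'] (data values, in order)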
function DLL() { this.head = this.tail = null; this.length = 0; } function setInitial(dll, node) { dll.length = 1; dll.head = dll.tail = node; } DLL.prototype.removeLink = function(node) { if (node.prev) node.prev.next = node.next; else this.head = node.next; if (node.next) node.next.prev = node.prev; else this.tail = node.prev; node.prev = node.next = null; this.length -= 1; return node; }; DLL.prototype.empty = function () { while(this.head) this.shift(); return this; }; DLL.prototype.insertAfter = function(node, newNode) { newNode.prev = node; newNode.next = node.next; if (node.next) node.next.prev = newNode; else this.tail = newNode; node.next = newNode; this.length += 1; }; DLL.prototype.insertBefore = function(node, newNode) { newNode.prev = node.prev; newNode.next = node; if (node.prev) node.prev.next = newNode; else this.head = newNode; node.prev = newNode; this.length += 1; }; DLL.prototype.unshift = function(node) { if (this.head) this.insertBefore(this.head, node); else setInitial(this, node); }; DLL.prototype.push = function(node) { if (this.tail) this.insertAfter(this.tail, node); else setInitial(this, node); }; DLL.prototype.shift = function() { return this.head && this.removeLink(this.head); }; DLL.prototype.pop = function() { return this.tail && this.removeLink(this.tail); }; DLL.prototype.toArray = function () { var arr = Array(this.length); var curr = this.head; for(var idx = 0; idx < this.length; idx++) { arr[idx] = curr.data; curr = curr.next; } return arr; }; DLL.prototype.remove = function (testFn) { var curr = this.head; while(!!curr) { var next = curr.next; if (testFn(curr)) { this.removeLink(curr); } curr = next; } return this; }; function queue(worker, concurrency, payload) { if (concurrency == null) { concurrency = 1; } else if(concurrency === 0) { throw new Error('Concurrency must not be zero'); } var _worker = wrapAsync(worker); var numRunning = 0; var workersList = []; var processingScheduled = false; function _insert(data, insertAtFront, callback) { if (callback != null && typeof callback !== 'function') { throw new Error('task callback must be a function'); } q.started = true; if (!isArray(data)) { data = [data]; } if (data.length === 0 && q.idle()) { // call drain immediately if there are no tasks return setImmediate$1(function() { q.drain(); }); } for (var i = 0, l = data.length; i < l; i++) { var item = { data: data[i], callback: callback || noop }; if (insertAtFront) { q._tasks.unshift(item); } else { q._tasks.push(item); } } if (!processingScheduled) { processingScheduled = true; setImmediate$1(function() { processingScheduled = false; q.process(); }); } } function _next(tasks) { return function(err){ numRunning -= 1; for (var i = 0, l = tasks.length; i < l; i++) { var task = tasks[i]; var index = baseIndexOf(workersList, task, 0); if (index === 0) { workersList.shift(); } else if (index > 0) { workersList.splice(index, 1); } task.callback.apply(task, arguments); if (err != null) { q.error(err, task.data); } } if (numRunning <= (q.concurrency - q.buffer) ) { q.unsaturated(); } if (q.idle()) { q.drain(); } q.process(); }; } var isProcessing = false; var q = { _tasks: new DLL(), concurrency: concurrency, payload: payload, saturated: noop, unsaturated:noop, buffer: concurrency / 4, empty: noop, drain: noop, error: noop, started: false, paused: false, push: function (data, callback) { _insert(data, false, callback); }, kill: function () { q.drain = noop; q._tasks.empty(); }, unshift: function (data, callback) { _insert(data, true, callback); }, remove: 
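/* drops queued task nodes that match testFn without running them */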
function (testFn) { q._tasks.remove(testFn); }, process: function () { // Avoid trying to start too many processing operations. This can occur // when callbacks resolve synchronously (#1267). if (isProcessing) { return; } isProcessing = true; while(!q.paused && numRunning < q.concurrency && q._tasks.length){ var tasks = [], data = []; var l = q._tasks.length; if (q.payload) l = Math.min(l, q.payload); for (var i = 0; i < l; i++) { var node = q._tasks.shift(); tasks.push(node); workersList.push(node); data.push(node.data); } numRunning += 1; if (q._tasks.length === 0) { q.empty(); } if (numRunning === q.concurrency) { q.saturated(); } var cb = onlyOnce(_next(tasks)); _worker(data, cb); } isProcessing = false; }, length: function () { return q._tasks.length; }, running: function () { return numRunning; }, workersList: function () { return workersList; }, idle: function() { return q._tasks.length + numRunning === 0; }, pause: function () { q.paused = true; }, resume: function () { if (q.paused === false) { return; } q.paused = false; setImmediate$1(q.process); } }; return q; } /** * A cargo of tasks for the worker function to complete. Cargo inherits all of * the same methods and event callbacks as [`queue`]{@link module:ControlFlow.queue}. * @typedef {Object} CargoObject * @memberOf module:ControlFlow * @property {Function} length - A function returning the number of items * waiting to be processed. Invoke like `cargo.length()`. * @property {number} payload - An `integer` for determining how many tasks * should be process per round. This property can be changed after a `cargo` is * created to alter the payload on-the-fly. * @property {Function} push - Adds `task` to the `queue`. The callback is * called once the `worker` has finished processing the task. Instead of a * single task, an array of `tasks` can be submitted. The respective callback is * used for every task in the list. Invoke like `cargo.push(task, [callback])`. * @property {Function} saturated - A callback that is called when the * `queue.length()` hits the concurrency and further tasks will be queued. * @property {Function} empty - A callback that is called when the last item * from the `queue` is given to a `worker`. * @property {Function} drain - A callback that is called when the last item * from the `queue` has returned from the `worker`. * @property {Function} idle - a function returning false if there are items * waiting or being processed, or true if not. Invoke like `cargo.idle()`. * @property {Function} pause - a function that pauses the processing of tasks * until `resume()` is called. Invoke like `cargo.pause()`. * @property {Function} resume - a function that resumes the processing of * queued tasks when the queue is paused. Invoke like `cargo.resume()`. * @property {Function} kill - a function that removes the `drain` callback and * empties remaining tasks from the queue forcing it to go idle. Invoke like `cargo.kill()`. */ /** * Creates a `cargo` object with the specified payload. Tasks added to the * cargo will be processed altogether (up to the `payload` limit). If the * `worker` is in progress, the task is queued until it becomes available. Once * the `worker` has completed some tasks, each callback of those tasks is * called. 
Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) * for how `cargo` and `queue` work. * * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers * at a time, cargo passes an array of tasks to a single worker, repeating * when the worker is finished. * * @name cargo * @static * @memberOf module:ControlFlow * @method * @see [async.queue]{@link module:ControlFlow.queue} * @category Control Flow * @param {AsyncFunction} worker - An asynchronous function for processing an array * of queued tasks. Invoked with `(tasks, callback)`. * @param {number} [payload=Infinity] - An optional `integer` for determining * how many tasks should be processed per round; if omitted, the default is * unlimited. * @returns {module:ControlFlow.CargoObject} A cargo object to manage the tasks. Callbacks can * attached as certain properties to listen for specific events during the * lifecycle of the cargo and inner queue. * @example * * // create a cargo object with payload 2 * var cargo = async.cargo(function(tasks, callback) { * for (var i=0; i true */ function identity(value) { return value; } function _createTester(check, getResult) { return function(eachfn, arr, iteratee, cb) { cb = cb || noop; var testPassed = false; var testResult; eachfn(arr, function(value, _, callback) { iteratee(value, function(err, result) { if (err) { callback(err); } else if (check(result) && !testResult) { testPassed = true; testResult = getResult(true, value); callback(null, breakLoop); } else { callback(); } }); }, function(err) { if (err) { cb(err); } else { cb(null, testPassed ? testResult : getResult(false)); } }); }; } function _findGetResult(v, x) { return x; } /** * Returns the first value in `coll` that passes an async truth test. The * `iteratee` is applied in parallel, meaning the first iteratee to return * `true` will fire the detect `callback` with that result. That means the * result might not be the first item in the original `coll` (in terms of order) * that passes the test. * If order within the original `coll` is important, then look at * [`detectSeries`]{@link module:Collections.detectSeries}. * * @name detect * @static * @memberOf module:Collections * @method * @alias find * @category Collections * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. * The iteratee must complete with a boolean value as its result. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the `iteratee` functions have finished. * Result will be the first item in the array that passes the truth test * (iteratee) or the value `undefined` if none passed. Invoked with * (err, result). 
* @example * * async.detect(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, result) { * // result now equals the first file in the list that exists * }); */ var detect = doParallel(_createTester(identity, _findGetResult)); /** * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a * time. * * @name detectLimit * @static * @memberOf module:Collections * @method * @see [async.detect]{@link module:Collections.detect} * @alias findLimit * @category Collections * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. * The iteratee must complete with a boolean value as its result. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the `iteratee` functions have finished. * Result will be the first item in the array that passes the truth test * (iteratee) or the value `undefined` if none passed. Invoked with * (err, result). */ var detectLimit = doParallelLimit(_createTester(identity, _findGetResult)); /** * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. * * @name detectSeries * @static * @memberOf module:Collections * @method * @see [async.detect]{@link module:Collections.detect} * @alias findSeries * @category Collections * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. * The iteratee must complete with a boolean value as its result. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the `iteratee` functions have finished. * Result will be the first item in the array that passes the truth test * (iteratee) or the value `undefined` if none passed. Invoked with * (err, result). */ var detectSeries = doLimit(detectLimit, 1); function consoleFunc(name) { return function (fn/*, ...args*/) { var args = slice(arguments, 1); args.push(function (err/*, ...args*/) { var args = slice(arguments, 1); if (typeof console === 'object') { if (err) { if (console.error) { console.error(err); } } else if (console[name]) { arrayEach(args, function (x) { console[name](x); }); } } }); wrapAsync(fn).apply(null, args); }; } /** * Logs the result of an [`async` function]{@link AsyncFunction} to the * `console` using `console.dir` to display the properties of the resulting object. * Only works in Node.js or in browsers that support `console.dir` and * `console.error` (such as FF and Chrome). * If multiple arguments are returned from the async function, * `console.dir` is called on each argument in order. * * @name dir * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} function - The function you want to eventually apply * all arguments to. * @param {...*} arguments... - Any number of arguments to apply to the function. 
* @example * * // in a module * var hello = function(name, callback) { * setTimeout(function() { * callback(null, {hello: name}); * }, 1000); * }; * * // in the node repl * node> async.dir(hello, 'world'); * {hello: 'world'} */ var dir = consoleFunc('dir'); /** * The post-check version of [`during`]{@link module:ControlFlow.during}. To reflect the difference in * the order of operations, the arguments `test` and `fn` are switched. * * Also a version of [`doWhilst`]{@link module:ControlFlow.doWhilst} with asynchronous `test` function. * @name doDuring * @static * @memberOf module:ControlFlow * @method * @see [async.during]{@link module:ControlFlow.during} * @category Control Flow * @param {AsyncFunction} fn - An async function which is called each time * `test` passes. Invoked with (callback). * @param {AsyncFunction} test - asynchronous truth test to perform before each * execution of `fn`. Invoked with (...args, callback), where `...args` are the * non-error args from the previous callback of `fn`. * @param {Function} [callback] - A callback which is called after the test * function has failed and repeated execution of `fn` has stopped. `callback` * will be passed an error if one occurred, otherwise `null`. */ function doDuring(fn, test, callback) { callback = onlyOnce(callback || noop); var _fn = wrapAsync(fn); var _test = wrapAsync(test); function next(err/*, ...args*/) { if (err) return callback(err); var args = slice(arguments, 1); args.push(check); _test.apply(this, args); } function check(err, truth) { if (err) return callback(err); if (!truth) return callback(null); _fn(next); } check(null, true); } /** * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in * the order of operations, the arguments `test` and `iteratee` are switched. * * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. * * @name doWhilst * @static * @memberOf module:ControlFlow * @method * @see [async.whilst]{@link module:ControlFlow.whilst} * @category Control Flow * @param {AsyncFunction} iteratee - A function which is called each time `test` * passes. Invoked with (callback). * @param {Function} test - synchronous truth test to perform after each * execution of `iteratee`. Invoked with any non-error callback results of * `iteratee`. * @param {Function} [callback] - A callback which is called after the test * function has failed and repeated execution of `iteratee` has stopped. * `callback` will be passed an error and any arguments passed to the final * `iteratee`'s callback. Invoked with (err, [results]); */ function doWhilst(iteratee, test, callback) { callback = onlyOnce(callback || noop); var _iteratee = wrapAsync(iteratee); var next = function(err/*, ...args*/) { if (err) return callback(err); var args = slice(arguments, 1); if (test.apply(this, args)) return _iteratee(next); callback.apply(null, [null].concat(args)); }; _iteratee(next); } /** * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the * argument ordering differs from `until`. * * @name doUntil * @static * @memberOf module:ControlFlow * @method * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} * @category Control Flow * @param {AsyncFunction} iteratee - An async function which is called each time * `test` fails. Invoked with (callback). * @param {Function} test - synchronous truth test to perform after each * execution of `iteratee`. Invoked with any non-error callback results of * `iteratee`. 
* @param {Function} [callback] - A callback which is called after the test * function has passed and repeated execution of `iteratee` has stopped. `callback` * will be passed an error and any arguments passed to the final `iteratee`'s * callback. Invoked with (err, [results]); */ function doUntil(iteratee, test, callback) { doWhilst(iteratee, function() { return !test.apply(this, arguments); }, callback); } /** * Like [`whilst`]{@link module:ControlFlow.whilst}, except the `test` is an asynchronous function that * is passed a callback in the form of `function (err, truth)`. If error is * passed to `test` or `fn`, the main callback is immediately called with the * value of the error. * * @name during * @static * @memberOf module:ControlFlow * @method * @see [async.whilst]{@link module:ControlFlow.whilst} * @category Control Flow * @param {AsyncFunction} test - asynchronous truth test to perform before each * execution of `fn`. Invoked with (callback). * @param {AsyncFunction} fn - An async function which is called each time * `test` passes. Invoked with (callback). * @param {Function} [callback] - A callback which is called after the test * function has failed and repeated execution of `fn` has stopped. `callback` * will be passed an error, if one occurred, otherwise `null`. * @example * * var count = 0; * * async.during( * function (callback) { * return callback(null, count < 5); * }, * function (callback) { * count++; * setTimeout(callback, 1000); * }, * function (err) { * // 5 seconds have passed * } * ); */ function during(test, fn, callback) { callback = onlyOnce(callback || noop); var _fn = wrapAsync(fn); var _test = wrapAsync(test); function next(err) { if (err) return callback(err); _test(check); } function check(err, truth) { if (err) return callback(err); if (!truth) return callback(null); _fn(next); } _test(check); } function _withoutIndex(iteratee) { return function (value, index, callback) { return iteratee(value, callback); }; } /** * Applies the function `iteratee` to each item in `coll`, in parallel. * The `iteratee` is called with an item from the list, and a callback for when * it has finished. If the `iteratee` passes an error to its `callback`, the * main `callback` (for the `each` function) is immediately called with the * error. * * Note, that since this function applies `iteratee` to each item in parallel, * there is no guarantee that the iteratee functions will complete in order. * * @name each * @static * @memberOf module:Collections * @method * @alias forEach * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to * each item in `coll`. Invoked with (item, callback). * The array index is not passed to the iteratee. * If you need the index, use `eachOf`. * @param {Function} [callback] - A callback which is called when all * `iteratee` functions have finished, or an error occurs. Invoked with (err). * @example * * // assuming openFiles is an array of file names and saveFile is a function * // to save the modified contents of that file: * * async.each(openFiles, saveFile, function(err){ * // if any of the saves produced an error, err would equal that error * }); * * // assuming openFiles is an array of file names * async.each(openFiles, function(file, callback) { * * // Perform operation on file here. 
* console.log('Processing file ' + file); * * if( file.length > 32 ) { * console.log('This file name is too long'); * callback('File name too long'); * } else { * // Do work to process file here * console.log('File processed'); * callback(); * } * }, function(err) { * // if any of the file processing produced an error, err would equal that error * if( err ) { * // One of the iterations produced an error. * // All processing will now stop. * console.log('A file failed to process'); * } else { * console.log('All files have been processed successfully'); * } * }); */ function eachLimit(coll, iteratee, callback) { eachOf(coll, _withoutIndex(wrapAsync(iteratee)), callback); } /** * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. * * @name eachLimit * @static * @memberOf module:Collections * @method * @see [async.each]{@link module:Collections.each} * @alias forEachLimit * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The array index is not passed to the iteratee. * If you need the index, use `eachOfLimit`. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called when all * `iteratee` functions have finished, or an error occurs. Invoked with (err). */ function eachLimit$1(coll, limit, iteratee, callback) { _eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); } /** * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. * * @name eachSeries * @static * @memberOf module:Collections * @method * @see [async.each]{@link module:Collections.each} * @alias forEachSeries * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each * item in `coll`. * The array index is not passed to the iteratee. * If you need the index, use `eachOfSeries`. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called when all * `iteratee` functions have finished, or an error occurs. Invoked with (err). */ var eachSeries = doLimit(eachLimit$1, 1); /** * Wrap an async function and ensure it calls its callback on a later tick of * the event loop. If the function already calls its callback on a next tick, * no extra deferral is added. This is useful for preventing stack overflows * (`RangeError: Maximum call stack size exceeded`) and generally keeping * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) * contained. ES2017 `async` functions are returned as-is -- they are immune * to Zalgo's corrupting influences, as they always resolve on a later tick. * * @name ensureAsync * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} fn - an async function, one that expects a node-style * callback as its last argument. * @returns {AsyncFunction} Returns a wrapped function with the exact same call * signature as the function passed in. * @example * * function sometimesAsync(arg, callback) { * if (cache[arg]) { * return callback(null, cache[arg]); // this would be synchronous!! 
* } else { * doSomeIO(arg, callback); // this IO would be asynchronous * } * } * * // this has a risk of stack overflows if many results are cached in a row * async.mapSeries(args, sometimesAsync, done); * * // this will defer sometimesAsync's callback if necessary, * // preventing stack overflows * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); */ function ensureAsync(fn) { if (isAsync(fn)) return fn; return initialParams(function (args, callback) { var sync = true; args.push(function () { var innerArgs = arguments; if (sync) { setImmediate$1(function () { callback.apply(null, innerArgs); }); } else { callback.apply(null, innerArgs); } }); fn.apply(this, args); sync = false; }); } function notId(v) { return !v; } /** * Returns `true` if every element in `coll` satisfies an async test. If any * iteratee call returns `false`, the main `callback` is immediately called. * * @name every * @static * @memberOf module:Collections * @method * @alias all * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collection in parallel. * The iteratee must complete with a boolean result value. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Result will be either `true` or `false` * depending on the values of the async tests. Invoked with (err, result). * @example * * async.every(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, result) { * // if result is true then every file exists * }); */ var every = doParallel(_createTester(notId, notId)); /** * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. * * @name everyLimit * @static * @memberOf module:Collections * @method * @see [async.every]{@link module:Collections.every} * @alias allLimit * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collection in parallel. * The iteratee must complete with a boolean result value. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Result will be either `true` or `false` * depending on the values of the async tests. Invoked with (err, result). */ var everyLimit = doParallelLimit(_createTester(notId, notId)); /** * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. * * @name everySeries * @static * @memberOf module:Collections * @method * @see [async.every]{@link module:Collections.every} * @alias allSeries * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collection in series. * The iteratee must complete with a boolean result value. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Result will be either `true` or `false` * depending on the values of the async tests. Invoked with (err, result). 
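* @example * * // An illustrative usage sketch (not part of the original docs): files are * // checked one at a time and the check stops early once the outcome is known. * // Assumes Node's `fs` module is in scope. * async.everySeries(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, result) { * // if result is true then every file exists * });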
*/ var everySeries = doLimit(everyLimit, 1); /** * The base implementation of `_.property` without support for deep paths. * * @private * @param {string} key The key of the property to get. * @returns {Function} Returns the new accessor function. */ function baseProperty(key) { return function(object) { return object == null ? undefined : object[key]; }; } function filterArray(eachfn, arr, iteratee, callback) { var truthValues = new Array(arr.length); eachfn(arr, function (x, index, callback) { iteratee(x, function (err, v) { truthValues[index] = !!v; callback(err); }); }, function (err) { if (err) return callback(err); var results = []; for (var i = 0; i < arr.length; i++) { if (truthValues[i]) results.push(arr[i]); } callback(null, results); }); } function filterGeneric(eachfn, coll, iteratee, callback) { var results = []; eachfn(coll, function (x, index, callback) { iteratee(x, function (err, v) { if (err) { callback(err); } else { if (v) { results.push({index: index, value: x}); } callback(); } }); }, function (err) { if (err) { callback(err); } else { callback(null, arrayMap(results.sort(function (a, b) { return a.index - b.index; }), baseProperty('value'))); } }); } function _filter(eachfn, coll, iteratee, callback) { var filter = isArrayLike(coll) ? filterArray : filterGeneric; filter(eachfn, coll, wrapAsync(iteratee), callback || noop); } /** * Returns a new array of all the values in `coll` which pass an async truth * test. This operation is performed in parallel, but the results array will be * in the same order as the original. * * @name filter * @static * @memberOf module:Collections * @method * @alias select * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {Function} iteratee - A truth test to apply to each item in `coll`. * The `iteratee` is passed a `callback(err, truthValue)`, which must be called * with a boolean argument once it has completed. Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). * @example * * async.filter(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, results) { * // results now equals an array of the existing files * }); */ var filter = doParallel(_filter); /** * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a * time. * * @name filterLimit * @static * @memberOf module:Collections * @method * @see [async.filter]{@link module:Collections.filter} * @alias selectLimit * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {Function} iteratee - A truth test to apply to each item in `coll`. * The `iteratee` is passed a `callback(err, truthValue)`, which must be called * with a boolean argument once it has completed. Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). */ var filterLimit = doParallelLimit(_filter); /** * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. 
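* For example (an illustrative sketch, not part of the original docs; assumes Node's `fs` module is in scope): * * async.filterSeries(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, results) { * // results now equals an array of the existing files, tested one at a time * });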
* * @name filterSeries * @static * @memberOf module:Collections * @method * @see [async.filter]{@link module:Collections.filter} * @alias selectSeries * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {Function} iteratee - A truth test to apply to each item in `coll`. * The `iteratee` is passed a `callback(err, truthValue)`, which must be called * with a boolean argument once it has completed. Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). */ var filterSeries = doLimit(filterLimit, 1); /** * Calls the asynchronous function `fn` with a callback parameter that allows it * to call itself again, in series, indefinitely. * If an error is passed to the callback then `errback` is called with the * error, and execution stops, otherwise it will never be called. * * @name forever * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {AsyncFunction} fn - an async function to call repeatedly. * Invoked with (next). * @param {Function} [errback] - when `fn` passes an error to its callback, * this function will be called, and execution stops. Invoked with (err). * @example * * async.forever( * function(next) { * // next is suitable for passing to things that need a callback(err [, whatever]); * // it will result in this function being called again. * }, * function(err) { * // if next is called with a value in its first parameter, it will appear * // in here as 'err', and execution will stop. * } * ); */ function forever(fn, errback) { var done = onlyOnce(errback || noop); var task = wrapAsync(ensureAsync(fn)); function next(err) { if (err) return done(err); task(next); } next(); } /** * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. * * @name groupByLimit * @static * @memberOf module:Collections * @method * @see [async.groupBy]{@link module:Collections.groupBy} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with a `key` to group the value under. * Invoked with (value, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Result is an `Object` whose * properties are arrays of values which returned the corresponding key. */ var groupByLimit = function(coll, limit, iteratee, callback) { callback = callback || noop; var _iteratee = wrapAsync(iteratee); mapLimit(coll, limit, function(val, callback) { _iteratee(val, function(err, key) { if (err) return callback(err); return callback(null, {key: key, val: val}); }); }, function(err, mapResults) { var result = {}; // from MDN, handle object having a `hasOwnProperty` prop var hasOwnProperty = Object.prototype.hasOwnProperty; for (var i = 0; i < mapResults.length; i++) { if (mapResults[i]) { var key = mapResults[i].key; var val = mapResults[i].val; if (hasOwnProperty.call(result, key)) { result[key].push(val); } else { result[key] = [val]; } } } return callback(err, result); }); }; /** * Returns a new object, where each value corresponds to an array of items, from * `coll`, that returned the corresponding key.
That is, the keys of the object * correspond to the values passed to the `iteratee` callback. * * Note: Since this function applies the `iteratee` to each item in parallel, * there is no guarantee that the `iteratee` functions will complete in order. * However, the values for each key in the `result` will be in the same order as * the original `coll`. For Objects, the values will roughly be in the order of * the original Objects' keys (but this can vary across JavaScript engines). * * @name groupBy * @static * @memberOf module:Collections * @method * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with a `key` to group the value under. * Invoked with (value, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Result is an `Object` whose * properties are arrays of values which returned the corresponding key. * @example * * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { * db.findById(userId, function(err, user) { * if (err) return callback(err); * return callback(null, user.age); * }); * }, function(err, result) { * // result is an object containing the userIds grouped by age * // e.g. { 30: ['userId1', 'userId3'], 42: ['userId2']}; * }); */ var groupBy = doLimit(groupByLimit, Infinity); /** * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. * * @name groupBySeries * @static * @memberOf module:Collections * @method * @see [async.groupBy]{@link module:Collections.groupBy} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with a `key` to group the value under. * Invoked with (value, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. Result is an `Object` whose * properties are arrays of values which returned the corresponding key. */ var groupBySeries = doLimit(groupByLimit, 1); /** * Logs the result of an `async` function to the `console`. Only works in * Node.js or in browsers that support `console.log` and `console.error` (such * as FF and Chrome). If multiple arguments are returned from the async * function, `console.log` is called on each argument in order. * * @name log * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} function - The function you want to eventually apply * all arguments to. * @param {...*} arguments... - Any number of arguments to apply to the function. * @example * * // in a module * var hello = function(name, callback) { * setTimeout(function() { * callback(null, 'hello ' + name); * }, 1000); * }; * * // in the node repl * node> async.log(hello, 'world'); * 'hello world' */ var log = consoleFunc('log'); /** * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a * time. * * @name mapValuesLimit * @static * @memberOf module:Collections * @method * @see [async.mapValues]{@link module:Collections.mapValues} * @category Collection * @param {Object} obj - A collection to iterate over.
* @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - A function to apply to each value and key * in `coll`. * The iteratee should complete with the transformed value as its result. * Invoked with (value, key, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. `result` is a new object consisting * of each key from `obj`, with each transformed value on the right-hand side. * Invoked with (err, result). */ function mapValuesLimit(obj, limit, iteratee, callback) { callback = once(callback || noop); var newObj = {}; var _iteratee = wrapAsync(iteratee); eachOfLimit(obj, limit, function(val, key, next) { _iteratee(val, key, function (err, result) { if (err) return next(err); newObj[key] = result; next(); }); }, function (err) { callback(err, newObj); }); } /** * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. * * Produces a new Object by mapping each value of `obj` through the `iteratee` * function. The `iteratee` is called each `value` and `key` from `obj` and a * callback for when it has finished processing. Each of these callbacks takes * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` * passes an error to its callback, the main `callback` (for the `mapValues` * function) is immediately called with the error. * * Note, the order of the keys in the result is not guaranteed. The keys will * be roughly in the order they complete, (but this is very engine-specific) * * @name mapValues * @static * @memberOf module:Collections * @method * @category Collection * @param {Object} obj - A collection to iterate over. * @param {AsyncFunction} iteratee - A function to apply to each value and key * in `coll`. * The iteratee should complete with the transformed value as its result. * Invoked with (value, key, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. `result` is a new object consisting * of each key from `obj`, with each transformed value on the right-hand side. * Invoked with (err, result). * @example * * async.mapValues({ * f1: 'file1', * f2: 'file2', * f3: 'file3' * }, function (file, key, callback) { * fs.stat(file, callback); * }, function(err, result) { * // result is now a map of stats for each file, e.g. * // { * // f1: [stats for file1], * // f2: [stats for file2], * // f3: [stats for file3] * // } * }); */ var mapValues = doLimit(mapValuesLimit, Infinity); /** * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. * * @name mapValuesSeries * @static * @memberOf module:Collections * @method * @see [async.mapValues]{@link module:Collections.mapValues} * @category Collection * @param {Object} obj - A collection to iterate over. * @param {AsyncFunction} iteratee - A function to apply to each value and key * in `coll`. * The iteratee should complete with the transformed value as its result. * Invoked with (value, key, callback). * @param {Function} [callback] - A callback which is called when all `iteratee` * functions have finished, or an error occurs. `result` is a new object consisting * of each key from `obj`, with each transformed value on the right-hand side. * Invoked with (err, result). */ var mapValuesSeries = doLimit(mapValuesLimit, 1); function has(obj, key) { return key in obj; } /** * Caches the results of an async function. 
When creating a hash to store * function results against, the callback is omitted from the hash and an * optional hash function can be used. * * If no hash function is specified, the first argument is used as a hash key, * which may work reasonably if it is a string or a data type that converts to a * distinct string. Note that objects and arrays will not behave reasonably. * Neither will cases where the other arguments are significant. In such cases, * specify your own hash function. * * The cache of results is exposed as the `memo` property of the function * returned by `memoize`. * * @name memoize * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} fn - The async function to proxy and cache results from. * @param {Function} hasher - An optional function for generating a custom hash * for storing results. It has all the arguments applied to it apart from the * callback, and must be synchronous. * @returns {AsyncFunction} a memoized version of `fn` * @example * * var slow_fn = function(name, callback) { * // do something * callback(null, result); * }; * var fn = async.memoize(slow_fn); * * // fn can now be used as if it were slow_fn * fn('some name', function() { * // callback * }); */ function memoize(fn, hasher) { var memo = Object.create(null); var queues = Object.create(null); hasher = hasher || identity; var _fn = wrapAsync(fn); var memoized = initialParams(function memoized(args, callback) { var key = hasher.apply(null, args); if (has(memo, key)) { setImmediate$1(function() { callback.apply(null, memo[key]); }); } else if (has(queues, key)) { queues[key].push(callback); } else { queues[key] = [callback]; _fn.apply(null, args.concat(function(/*args*/) { var args = slice(arguments); memo[key] = args; var q = queues[key]; delete queues[key]; for (var i = 0, l = q.length; i < l; i++) { q[i].apply(null, args); } })); } }); memoized.memo = memo; memoized.unmemoized = fn; return memoized; } /** * Calls `callback` on a later loop around the event loop. In Node.js this just * calls `process.nextTick`. In the browser it will use `setImmediate` if * available, otherwise `setTimeout(callback, 0)`, which means other higher * priority events may precede the execution of `callback`. * * This is used internally for browser-compatibility purposes. * * @name nextTick * @static * @memberOf module:Utils * @method * @see [async.setImmediate]{@link module:Utils.setImmediate} * @category Util * @param {Function} callback - The function to call on a later loop around * the event loop. Invoked with (args...). * @param {...*} args... - any number of additional arguments to pass to the * callback on the next tick. * @example * * var call_order = []; * async.nextTick(function() { * call_order.push('two'); * // call_order now equals ['one','two'] * }); * call_order.push('one'); * * async.setImmediate(function (a, b, c) { * // a, b, and c equal 1, 2, and 3 * }, 1, 2, 3); */ var _defer$1; if (hasNextTick) { _defer$1 = process.nextTick; } else if (hasSetImmediate) { _defer$1 = setImmediate; } else { _defer$1 = fallback; } var nextTick = wrap(_defer$1); function _parallel(eachfn, tasks, callback) { callback = callback || noop; var results = isArrayLike(tasks) ?
[] : {}; eachfn(tasks, function (task, key, callback) { wrapAsync(task)(function (err, result) { if (arguments.length > 2) { result = slice(arguments, 1); } results[key] = result; callback(err); }); }, function (err) { callback(err, results); }); } /** * Run the `tasks` collection of functions in parallel, without waiting until * the previous function has completed. If any of the functions pass an error to * its callback, the main `callback` is immediately called with the value of the * error. Once the `tasks` have completed, the results are passed to the final * `callback` as an array. * * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about * parallel execution of code. If your tasks do not use any timers or perform * any I/O, they will actually be executed in series. Any synchronous setup * sections for each task will happen one after the other. JavaScript remains * single-threaded. * * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the * execution of other tasks when a task fails. * * It is also possible to use an object instead of an array. Each property will * be run as a function and the results will be passed to the final `callback` * as an object instead of an array. This can be a more readable way of handling * results from {@link async.parallel}. * * @name parallel * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array|Iterable|Object} tasks - A collection of * [async functions]{@link AsyncFunction} to run. * Each async function can complete with any number of optional `result` values. * @param {Function} [callback] - An optional callback to run once all the * functions have completed successfully. This function gets a results array * (or object) containing all the result arguments passed to the task callbacks. * Invoked with (err, results). * * @example * async.parallel([ * function(callback) { * setTimeout(function() { * callback(null, 'one'); * }, 200); * }, * function(callback) { * setTimeout(function() { * callback(null, 'two'); * }, 100); * } * ], * // optional callback * function(err, results) { * // the results array will equal ['one','two'] even though * // the second function had a shorter timeout. * }); * * // an example using an object instead of an array * async.parallel({ * one: function(callback) { * setTimeout(function() { * callback(null, 1); * }, 200); * }, * two: function(callback) { * setTimeout(function() { * callback(null, 2); * }, 100); * } * }, function(err, results) { * // results is now equals to: {one: 1, two: 2} * }); */ function parallelLimit(tasks, callback) { _parallel(eachOf, tasks, callback); } /** * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a * time. * * @name parallelLimit * @static * @memberOf module:ControlFlow * @method * @see [async.parallel]{@link module:ControlFlow.parallel} * @category Control Flow * @param {Array|Iterable|Object} tasks - A collection of * [async functions]{@link AsyncFunction} to run. * Each async function can complete with any number of optional `result` values. * @param {number} limit - The maximum number of async operations at a time. * @param {Function} [callback] - An optional callback to run once all the * functions have completed successfully. This function gets a results array * (or object) containing all the result arguments passed to the task callbacks. * Invoked with (err, results). 
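* @example * * // An illustrative sketch (not part of the original docs): at most two of the * // three tasks run at any one time. * async.parallelLimit([ * function(callback) { * setTimeout(function() { * callback(null, 'one'); * }, 200); * }, * function(callback) { * setTimeout(function() { * callback(null, 'two'); * }, 100); * }, * function(callback) { * setTimeout(function() { * callback(null, 'three'); * }, 150); * } * ], 2, function(err, results) { * // results will equal ['one', 'two', 'three'] * });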
*/ function parallelLimit$1(tasks, limit, callback) { _parallel(_eachOfLimit(limit), tasks, callback); } /** * A queue of tasks for the worker function to complete. * @typedef {Object} QueueObject * @memberOf module:ControlFlow * @property {Function} length - a function returning the number of items * waiting to be processed. Invoke with `queue.length()`. * @property {boolean} started - a boolean indicating whether or not any * items have been pushed and processed by the queue. * @property {Function} running - a function returning the number of items * currently being processed. Invoke with `queue.running()`. * @property {Function} workersList - a function returning the array of items * currently being processed. Invoke with `queue.workersList()`. * @property {Function} idle - a function returning false if there are items * waiting or being processed, or true if not. Invoke with `queue.idle()`. * @property {number} concurrency - an integer for determining how many `worker` * functions should be run in parallel. This property can be changed after a * `queue` is created to alter the concurrency on-the-fly. * @property {Function} push - add a new task to the `queue`. Calls `callback` * once the `worker` has finished processing the task. Instead of a single task, * a `tasks` array can be submitted. The respective callback is used for every * task in the list. Invoke with `queue.push(task, [callback])`, * @property {Function} unshift - add a new task to the front of the `queue`. * Invoke with `queue.unshift(task, [callback])`. * @property {Function} remove - remove items from the queue that match a test * function. The test function will be passed an object with a `data` property, * and a `priority` property, if this is a * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. * Invoked with `queue.remove(testFn)`, where `testFn` is of the form * `function ({data, priority}) {}` and returns a Boolean. * @property {Function} saturated - a callback that is called when the number of * running workers hits the `concurrency` limit, and further tasks will be * queued. * @property {Function} unsaturated - a callback that is called when the number * of running workers is less than the `concurrency` & `buffer` limits, and * further tasks will not be queued. * @property {number} buffer - A minimum threshold buffer in order to say that * the `queue` is `unsaturated`. * @property {Function} empty - a callback that is called when the last item * from the `queue` is given to a `worker`. * @property {Function} drain - a callback that is called when the last item * from the `queue` has returned from the `worker`. * @property {Function} error - a callback that is called when a task errors. * Has the signature `function(error, task)`. * @property {boolean} paused - a boolean for determining whether the queue is * in a paused state. * @property {Function} pause - a function that pauses the processing of tasks * until `resume()` is called. Invoke with `queue.pause()`. * @property {Function} resume - a function that resumes the processing of * queued tasks when the queue is paused. Invoke with `queue.resume()`. * @property {Function} kill - a function that removes the `drain` callback and * empties remaining tasks from the queue forcing it to go idle. No more tasks * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. */ /** * Creates a `queue` object with the specified `concurrency`. Tasks added to the * `queue` are processed in parallel (up to the `concurrency` limit). 
If all * `worker`s are in progress, the task is queued until one becomes available. * Once a `worker` completes a `task`, that `task`'s callback is called. * * @name queue * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {AsyncFunction} worker - An async function for processing a queued task. * If you want to handle errors from an individual task, pass a callback to * `q.push()`. Invoked with (task, callback). * @param {number} [concurrency=1] - An `integer` for determining how many * `worker` functions should be run in parallel. If omitted, the concurrency * defaults to `1`. If the concurrency is `0`, an error is thrown. * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can * attached as certain properties to listen for specific events during the * lifecycle of the queue. * @example * * // create a queue object with concurrency 2 * var q = async.queue(function(task, callback) { * console.log('hello ' + task.name); * callback(); * }, 2); * * // assign a callback * q.drain = function() { * console.log('all items have been processed'); * }; * * // add some items to the queue * q.push({name: 'foo'}, function(err) { * console.log('finished processing foo'); * }); * q.push({name: 'bar'}, function (err) { * console.log('finished processing bar'); * }); * * // add some items to the queue (batch-wise) * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { * console.log('finished processing item'); * }); * * // add some items to the front of the queue * q.unshift({name: 'bar'}, function (err) { * console.log('finished processing bar'); * }); */ var queue$1 = function (worker, concurrency) { var _worker = wrapAsync(worker); return queue(function (items, cb) { _worker(items[0], cb); }, concurrency, 1); }; /** * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and * completed in ascending priority order. * * @name priorityQueue * @static * @memberOf module:ControlFlow * @method * @see [async.queue]{@link module:ControlFlow.queue} * @category Control Flow * @param {AsyncFunction} worker - An async function for processing a queued task. * If you want to handle errors from an individual task, pass a callback to * `q.push()`. * Invoked with (task, callback). * @param {number} concurrency - An `integer` for determining how many `worker` * functions should be run in parallel. If omitted, the concurrency defaults to * `1`. If the concurrency is `0`, an error is thrown. * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two * differences between `queue` and `priorityQueue` objects: * * `push(task, priority, [callback])` - `priority` should be a number. If an * array of `tasks` is given, all tasks will be assigned the same priority. * * The `unshift` method was removed. 
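* @example * * // An illustrative sketch (not part of the original docs): lower priority * // numbers are processed first. * var pq = async.priorityQueue(function(task, callback) { * console.log('processing ' + task.name); * callback(); * }, 1); * pq.push({name: 'routine'}, 2, function(err) {}); * pq.push({name: 'urgent'}, 1, function(err) {}); // processed before 'routine'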
*/ var priorityQueue = function(worker, concurrency) { // Start with a normal queue var q = queue$1(worker, concurrency); // Override push to accept second parameter representing priority q.push = function(data, priority, callback) { if (callback == null) callback = noop; if (typeof callback !== 'function') { throw new Error('task callback must be a function'); } q.started = true; if (!isArray(data)) { data = [data]; } if (data.length === 0) { // call drain immediately if there are no tasks return setImmediate$1(function() { q.drain(); }); } priority = priority || 0; var nextNode = q._tasks.head; while (nextNode && priority >= nextNode.priority) { nextNode = nextNode.next; } for (var i = 0, l = data.length; i < l; i++) { var item = { data: data[i], priority: priority, callback: callback }; if (nextNode) { q._tasks.insertBefore(nextNode, item); } else { q._tasks.push(item); } } setImmediate$1(q.process); }; // Remove unshift function delete q.unshift; return q; }; /** * Runs the `tasks` array of functions in parallel, without waiting until the * previous function has completed. Once any of the `tasks` complete or pass an * error to its callback, the main `callback` is immediately called. It's * equivalent to `Promise.race()`. * * @name race * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} * to run. Each function can complete with an optional `result` value. * @param {Function} callback - A callback to run once any of the functions have * completed. This function gets an error or result from the first function that * completed. Invoked with (err, result). * @returns undefined * @example * * async.race([ * function(callback) { * setTimeout(function() { * callback(null, 'one'); * }, 200); * }, * function(callback) { * setTimeout(function() { * callback(null, 'two'); * }, 100); * } * ], * // main callback * function(err, result) { * // the result will be equal to 'two' as it finishes earlier * }); */ function race(tasks, callback) { callback = once(callback || noop); if (!isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); if (!tasks.length) return callback(); for (var i = 0, l = tasks.length; i < l; i++) { wrapAsync(tasks[i])(callback); } } /** * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. * * @name reduceRight * @static * @memberOf module:Collections * @method * @see [async.reduce]{@link module:Collections.reduce} * @alias foldr * @category Collection * @param {Array} array - A collection to iterate over. * @param {*} memo - The initial state of the reduction. * @param {AsyncFunction} iteratee - A function applied to each item in the * array to produce the next step in the reduction. * The `iteratee` should complete with the next state of the reduction. * If the iteratee complete with an error, the reduction is stopped and the * main `callback` is immediately called with the error. * Invoked with (memo, item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Result is the reduced value. Invoked with * (err, result). */ function reduceRight (array, memo, iteratee, callback) { var reversed = slice(array).reverse(); reduce(reversed, memo, iteratee, callback); } /** * Wraps the async function in another function that always completes with a * result object, even when it errors. 
* * The result object has either the property `error` or `value`. * * @name reflect * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} fn - The async function you want to wrap * @returns {Function} - A function that always passes null to it's callback as * the error. The second argument to the callback will be an `object` with * either an `error` or a `value` property. * @example * * async.parallel([ * async.reflect(function(callback) { * // do some stuff ... * callback(null, 'one'); * }), * async.reflect(function(callback) { * // do some more stuff but error ... * callback('bad stuff happened'); * }), * async.reflect(function(callback) { * // do some more stuff ... * callback(null, 'two'); * }) * ], * // optional callback * function(err, results) { * // values * // results[0].value = 'one' * // results[1].error = 'bad stuff happened' * // results[2].value = 'two' * }); */ function reflect(fn) { var _fn = wrapAsync(fn); return initialParams(function reflectOn(args, reflectCallback) { args.push(function callback(error, cbArg) { if (error) { reflectCallback(null, { error: error }); } else { var value; if (arguments.length <= 2) { value = cbArg; } else { value = slice(arguments, 1); } reflectCallback(null, { value: value }); } }); return _fn.apply(this, args); }); } /** * A helper function that wraps an array or an object of functions with `reflect`. * * @name reflectAll * @static * @memberOf module:Utils * @method * @see [async.reflect]{@link module:Utils.reflect} * @category Util * @param {Array|Object|Iterable} tasks - The collection of * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. * @returns {Array} Returns an array of async functions, each wrapped in * `async.reflect` * @example * * let tasks = [ * function(callback) { * setTimeout(function() { * callback(null, 'one'); * }, 200); * }, * function(callback) { * // do some more stuff but error ... * callback(new Error('bad stuff happened')); * }, * function(callback) { * setTimeout(function() { * callback(null, 'two'); * }, 100); * } * ]; * * async.parallel(async.reflectAll(tasks), * // optional callback * function(err, results) { * // values * // results[0].value = 'one' * // results[1].error = Error('bad stuff happened') * // results[2].value = 'two' * }); * * // an example using an object instead of an array * let tasks = { * one: function(callback) { * setTimeout(function() { * callback(null, 'one'); * }, 200); * }, * two: function(callback) { * callback('two'); * }, * three: function(callback) { * setTimeout(function() { * callback(null, 'three'); * }, 100); * } * }; * * async.parallel(async.reflectAll(tasks), * // optional callback * function(err, results) { * // values * // results.one.value = 'one' * // results.two.error = 'two' * // results.three.value = 'three' * }); */ function reflectAll(tasks) { var results; if (isArray(tasks)) { results = arrayMap(tasks, reflect); } else { results = {}; baseForOwn(tasks, function(task, key) { results[key] = reflect.call(this, task); }); } return results; } function reject$1(eachfn, arr, iteratee, callback) { _filter(eachfn, arr, function(value, cb) { iteratee(value, function(err, v) { cb(err, !v); }); }, callback); } /** * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. 
* * @name reject * @static * @memberOf module:Collections * @method * @see [async.filter]{@link module:Collections.filter} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {Function} iteratee - An async truth test to apply to each item in * `coll`. * The iteratee should complete with a boolean value as its `result`. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). * @example * * async.reject(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, results) { * // results now equals an array of missing files * createFiles(results); * }); */ var reject = doParallel(reject$1); /** * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a * time. * * @name rejectLimit * @static * @memberOf module:Collections * @method * @see [async.reject]{@link module:Collections.reject} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {Function} iteratee - An async truth test to apply to each item in * `coll`. * The iteratee should complete with a boolean value as its `result`. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). */ var rejectLimit = doParallelLimit(reject$1); /** * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. * * @name rejectSeries * @static * @memberOf module:Collections * @method * @see [async.reject]{@link module:Collections.reject} * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {Function} iteratee - An async truth test to apply to each item in * `coll`. * The iteratee should complete with a boolean value as its `result`. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results). */ var rejectSeries = doLimit(rejectLimit, 1); /** * Creates a function that returns `value`. * * @static * @memberOf _ * @since 2.4.0 * @category Util * @param {*} value The value to return from the new function. * @returns {Function} Returns the new constant function. * @example * * var objects = _.times(2, _.constant({ 'a': 1 })); * * console.log(objects); * // => [{ 'a': 1 }, { 'a': 1 }] * * console.log(objects[0] === objects[1]); * // => true */ function constant$1(value) { return function() { return value; }; } /** * Attempts to get a successful response from `task` no more than `times` times * before returning an error. If the task is successful, the `callback` will be * passed the result of the successful task. If all attempts fail, the callback * will be passed the error and result (if any) of the final attempt. * * @name retry * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @see [async.retryable]{@link module:ControlFlow.retryable} * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an * object with `times` and `interval` or a number. * * `times` - The number of attempts to make before giving up. The default * is `5`. * * `interval` - The time to wait between retries, in milliseconds.
The * default is `0`. The interval may also be specified as a function of the * retry count (see example). * * `errorFilter` - An optional synchronous function that is invoked on * erroneous result. If it returns `true` the retry attempts will continue; * if the function returns `false` the retry flow is aborted with the current * attempt's error and result being returned to the final callback. * Invoked with (err). * * If `opts` is a number, the number specifies the number of times to retry, * with the default interval of `0`. * @param {AsyncFunction} task - An async function to retry. * Invoked with (callback). * @param {Function} [callback] - An optional callback which is called when the * task has succeeded, or after the final failed attempt. It receives the `err` * and `result` arguments of the last attempt at completing the `task`. Invoked * with (err, results). * * @example * * // The `retry` function can be used as a stand-alone control flow by passing * // a callback, as shown below: * * // try calling apiMethod 3 times * async.retry(3, apiMethod, function(err, result) { * // do something with the result * }); * * // try calling apiMethod 3 times, waiting 200 ms between each retry * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { * // do something with the result * }); * * // try calling apiMethod 10 times with exponential backoff * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) * async.retry({ * times: 10, * interval: function(retryCount) { * return 50 * Math.pow(2, retryCount); * } * }, apiMethod, function(err, result) { * // do something with the result * }); * * // try calling apiMethod the default 5 times no delay between each retry * async.retry(apiMethod, function(err, result) { * // do something with the result * }); * * // try calling apiMethod only when error condition satisfies, all other * // errors will abort the retry control flow and return to final callback * async.retry({ * errorFilter: function(err) { * return err.message === 'Temporary error'; // only retry on a specific error * } * }, apiMethod, function(err, result) { * // do something with the result * }); * * // to retry individual methods that are not as reliable within other * // control flow functions, use the `retryable` wrapper: * async.auto({ * users: api.getUsers.bind(api), * payments: async.retryable(3, api.getPayments.bind(api)) * }, function(err, results) { * // do something with the results * }); * */ function retry(opts, task, callback) { var DEFAULT_TIMES = 5; var DEFAULT_INTERVAL = 0; var options = { times: DEFAULT_TIMES, intervalFunc: constant$1(DEFAULT_INTERVAL) }; function parseTimes(acc, t) { if (typeof t === 'object') { acc.times = +t.times || DEFAULT_TIMES; acc.intervalFunc = typeof t.interval === 'function' ? 
t.interval : constant$1(+t.interval || DEFAULT_INTERVAL); acc.errorFilter = t.errorFilter; } else if (typeof t === 'number' || typeof t === 'string') { acc.times = +t || DEFAULT_TIMES; } else { throw new Error("Invalid arguments for async.retry"); } } if (arguments.length < 3 && typeof opts === 'function') { callback = task || noop; task = opts; } else { parseTimes(options, opts); callback = callback || noop; } if (typeof task !== 'function') { throw new Error("Invalid arguments for async.retry"); } var _task = wrapAsync(task); var attempt = 1; function retryAttempt() { _task(function(err) { if (err && attempt++ < options.times && (typeof options.errorFilter != 'function' || options.errorFilter(err))) { setTimeout(retryAttempt, options.intervalFunc(attempt)); } else { callback.apply(null, arguments); } }); } retryAttempt(); } /** * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method * wraps a task and makes it retryable, rather than immediately calling it * with retries. * * @name retryable * @static * @memberOf module:ControlFlow * @method * @see [async.retry]{@link module:ControlFlow.retry} * @category Control Flow * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional * options, exactly the same as from `retry` * @param {AsyncFunction} task - the asynchronous function to wrap. * This function will be passed any arguments passed to the returned wrapper. * Invoked with (...args, callback). * @returns {AsyncFunction} The wrapped function, which when invoked, will * retry on an error, based on the parameters specified in `opts`. * This function will accept the same parameters as `task`. * @example * * async.auto({ * dep1: async.retryable(3, getFromFlakyService), * process: ["dep1", async.retryable(3, function (results, cb) { * maybeProcessData(results.dep1, cb); * })] * }, callback); */ var retryable = function (opts, task) { if (!task) { task = opts; opts = null; } var _task = wrapAsync(task); return initialParams(function (args, callback) { function taskFn(cb) { _task.apply(null, args.concat(cb)); } if (opts) retry(opts, taskFn, callback); else retry(taskFn, callback); }); }; /** * Run the functions in the `tasks` collection in series, each one running once * the previous function has completed. If any functions in the series pass an * error to its callback, no more functions are run, and `callback` is * immediately called with the value of the error. Otherwise, `callback` * receives an array of results when `tasks` have completed. * * It is also possible to use an object instead of an array. Each property will * be run as a function, and the results will be passed to the final `callback` * as an object instead of an array. This can be a more readable way of handling * results from {@link async.series}. * * **Note** that while many implementations preserve the order of object * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) * explicitly states that * * > The mechanics and order of enumerating the properties is not specified. * * So if you rely on the order in which your series of functions are executed, * and want this to work on all platforms, consider using an array. * * @name series * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array|Iterable|Object} tasks - A collection containing * [async functions]{@link AsyncFunction} to run in series. * Each function can complete with any number of optional `result` values. 
* @param {Function} [callback] - An optional callback to run once all the * functions have completed. This function gets a results array (or object) * containing all the result arguments passed to the `task` callbacks. Invoked * with (err, result). * @example * async.series([ * function(callback) { * // do some stuff ... * callback(null, 'one'); * }, * function(callback) { * // do some more stuff ... * callback(null, 'two'); * } * ], * // optional callback * function(err, results) { * // results is now equal to ['one', 'two'] * }); * * async.series({ * one: function(callback) { * setTimeout(function() { * callback(null, 1); * }, 200); * }, * two: function(callback){ * setTimeout(function() { * callback(null, 2); * }, 100); * } * }, function(err, results) { * // results is now equal to: {one: 1, two: 2} * }); */ function series(tasks, callback) { _parallel(eachOfSeries, tasks, callback); } /** * Returns `true` if at least one element in the `coll` satisfies an async test. * If any iteratee call returns `true`, the main `callback` is immediately * called. * * @name some * @static * @memberOf module:Collections * @method * @alias any * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collections in parallel. * The iteratee should complete with a boolean `result` value. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the iteratee functions have finished. * Result will be either `true` or `false` depending on the values of the async * tests. Invoked with (err, result). * @example * * async.some(['file1','file2','file3'], function(filePath, callback) { * fs.access(filePath, function(err) { * callback(null, !err) * }); * }, function(err, result) { * // if result is true then at least one of the files exists * }); */ var some = doParallel(_createTester(Boolean, identity)); /** * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. * * @name someLimit * @static * @memberOf module:Collections * @method * @see [async.some]{@link module:Collections.some} * @alias anyLimit * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collections in parallel. * The iteratee should complete with a boolean `result` value. * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the iteratee functions have finished. * Result will be either `true` or `false` depending on the values of the async * tests. Invoked with (err, result). */ var someLimit = doParallelLimit(_createTester(Boolean, identity)); /** * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. * * @name someSeries * @static * @memberOf module:Collections * @method * @see [async.some]{@link module:Collections.some} * @alias anySeries * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async truth test to apply to each item * in the collections in series. * The iteratee should complete with a boolean `result` value. * Invoked with (item, callback). 
* @param {Function} [callback] - A callback which is called as soon as any * iteratee returns `true`, or after all the iteratee functions have finished. * Result will be either `true` or `false` depending on the values of the async * tests. Invoked with (err, result). */ var someSeries = doLimit(someLimit, 1); /** * Sorts a list by the results of running each `coll` value through an async * `iteratee`. * * @name sortBy * @static * @memberOf module:Collections * @method * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with a value to use as the sort criteria as * its `result`. * Invoked with (item, callback). * @param {Function} callback - A callback which is called after all the * `iteratee` functions have finished, or an error occurs. Results is the items * from the original `coll` sorted by the values returned by the `iteratee` * calls. Invoked with (err, results). * @example * * async.sortBy(['file1','file2','file3'], function(file, callback) { * fs.stat(file, function(err, stats) { * callback(err, stats.mtime); * }); * }, function(err, results) { * // results is now the original array of files sorted by * // modified date * }); * * // By modifying the callback parameter the * // sorting order can be influenced: * * // ascending order * async.sortBy([1,9,3,5], function(x, callback) { * callback(null, x); * }, function(err,result) { * // result callback * }); * * // descending order * async.sortBy([1,9,3,5], function(x, callback) { * callback(null, x*-1); //<- x*-1 instead of x, turns the order around * }, function(err,result) { * // result callback * }); */ function sortBy (coll, iteratee, callback) { var _iteratee = wrapAsync(iteratee); map(coll, function (x, callback) { _iteratee(x, function (err, criteria) { if (err) return callback(err); callback(null, {value: x, criteria: criteria}); }); }, function (err, results) { if (err) return callback(err); callback(null, arrayMap(results.sort(comparator), baseProperty('value'))); }); function comparator(left, right) { var a = left.criteria, b = right.criteria; return a < b ? -1 : a > b ? 1 : 0; } } /** * Sets a time limit on an asynchronous function. If the function does not call * its callback within the specified milliseconds, it will be called with a * timeout error. The code property for the error object will be `'ETIMEDOUT'`. * * @name timeout * @static * @memberOf module:Utils * @method * @category Util * @param {AsyncFunction} asyncFn - The async function to limit in time. * @param {number} milliseconds - The specified time limit. * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) * to timeout Error for more information.. * @returns {AsyncFunction} Returns a wrapped function that can be used with any * of the control flow functions. * Invoke this function with the same parameters as you would `asyncFunc`. * @example * * function myFunction(foo, callback) { * doAsyncTask(foo, function(err, data) { * // handle errors * if (err) return callback(err); * * // do some stuff ... 
* * // return processed data * return callback(null, data); * }); * } * * var wrapped = async.timeout(myFunction, 1000); * * // call `wrapped` as you would `myFunction` * wrapped({ bar: 'bar' }, function(err, data) { * // if `myFunction` takes < 1000 ms to execute, `err` * // and `data` will have their expected values * * // else `err` will be an Error with the code 'ETIMEDOUT' * }); */ function timeout(asyncFn, milliseconds, info) { var fn = wrapAsync(asyncFn); return initialParams(function (args, callback) { var timedOut = false; var timer; function timeoutCallback() { var name = asyncFn.name || 'anonymous'; var error = new Error('Callback function "' + name + '" timed out.'); error.code = 'ETIMEDOUT'; if (info) { error.info = info; } timedOut = true; callback(error); } args.push(function () { if (!timedOut) { callback.apply(null, arguments); clearTimeout(timer); } }); // setup timer and call original function timer = setTimeout(timeoutCallback, milliseconds); fn.apply(null, args); }); } /* Built-in method references for those with the same name as other `lodash` methods. */ var nativeCeil = Math.ceil; var nativeMax = Math.max; /** * The base implementation of `_.range` and `_.rangeRight` which doesn't * coerce arguments. * * @private * @param {number} start The start of the range. * @param {number} end The end of the range. * @param {number} step The value to increment or decrement by. * @param {boolean} [fromRight] Specify iterating from right to left. * @returns {Array} Returns the range of numbers. */ function baseRange(start, end, step, fromRight) { var index = -1, length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), result = Array(length); while (length--) { result[fromRight ? length : ++index] = start; start += step; } return result; } /** * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a * time. * * @name timesLimit * @static * @memberOf module:ControlFlow * @method * @see [async.times]{@link module:ControlFlow.times} * @category Control Flow * @param {number} count - The number of times to run the function. * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - The async function to call `n` times. * Invoked with the iteration index and a callback: (n, next). * @param {Function} callback - see [async.map]{@link module:Collections.map}. */ function timeLimit(count, limit, iteratee, callback) { var _iteratee = wrapAsync(iteratee); mapLimit(baseRange(0, count, 1), limit, _iteratee, callback); } /** * Calls the `iteratee` function `n` times, and accumulates results in the same * manner you would use with [map]{@link module:Collections.map}. * * @name times * @static * @memberOf module:ControlFlow * @method * @see [async.map]{@link module:Collections.map} * @category Control Flow * @param {number} n - The number of times to run the function. * @param {AsyncFunction} iteratee - The async function to call `n` times. * Invoked with the iteration index and a callback: (n, next). * @param {Function} callback - see {@link module:Collections.map}. 
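 *
 * An illustrative sketch (not part of the original docs) for the related timesLimit
 * documented above, which takes an extra `limit` argument before the iteratee
 * (`createUser` is the assumed factory from the times() example below):
 *
 * // create 100 users, with at most 5 createUser calls in flight at any time
 * async.timesLimit(100, 5, function(n, next) {
 *     createUser(n, next);
 * }, function(err, users) {
 *     // users is an array of 100 results, unless err is set
 * });
 *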
* @example * * // Pretend this is some complicated async factory * var createUser = function(id, callback) { * callback(null, { * id: 'user' + id * }); * }; * * // generate 5 users * async.times(5, function(n, next) { * createUser(n, function(err, user) { * next(err, user); * }); * }, function(err, users) { * // we should now have 5 users * }); */ var times = doLimit(timeLimit, Infinity); /** * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. * * @name timesSeries * @static * @memberOf module:ControlFlow * @method * @see [async.times]{@link module:ControlFlow.times} * @category Control Flow * @param {number} n - The number of times to run the function. * @param {AsyncFunction} iteratee - The async function to call `n` times. * Invoked with the iteration index and a callback: (n, next). * @param {Function} callback - see {@link module:Collections.map}. */ var timesSeries = doLimit(timeLimit, 1); /** * A relative of `reduce`. Takes an Object or Array, and iterates over each * element in series, each step potentially mutating an `accumulator` value. * The type of the accumulator defaults to the type of collection passed in. * * @name transform * @static * @memberOf module:Collections * @method * @category Collection * @param {Array|Iterable|Object} coll - A collection to iterate over. * @param {*} [accumulator] - The initial state of the transform. If omitted, * it will default to an empty Object or Array, depending on the type of `coll` * @param {AsyncFunction} iteratee - A function applied to each item in the * collection that potentially modifies the accumulator. * Invoked with (accumulator, item, key, callback). * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Result is the transformed accumulator. * Invoked with (err, result). * @example * * async.transform([1,2,3], function(acc, item, index, callback) { * // pointless async: * process.nextTick(function() { * acc.push(item * 2) * callback(null) * }); * }, function(err, result) { * // result is now equal to [2, 4, 6] * }); * * @example * * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { * setImmediate(function () { * obj[key] = val * 2; * callback(); * }) * }, function (err, result) { * // result is equal to {a: 2, b: 4, c: 6} * }) */ function transform (coll, accumulator, iteratee, callback) { if (arguments.length <= 3) { callback = iteratee; iteratee = accumulator; accumulator = isArray(coll) ? [] : {}; } callback = once(callback || noop); var _iteratee = wrapAsync(iteratee); eachOf(coll, function(v, k, cb) { _iteratee(accumulator, v, k, cb); }, function(err) { callback(err, accumulator); }); } /** * It runs each task in series but stops whenever any of the functions were * successful. If one of the tasks were successful, the `callback` will be * passed the result of the successful task. If all tasks fail, the callback * will be passed the error and result (if any) of the final attempt. * * @name tryEach * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array|Iterable|Object} tasks - A collection containing functions to * run, each function is passed a `callback(err, result)` it must call on * completion with an error `err` (which can be `null`) and an optional `result` * value. * @param {Function} [callback] - An optional callback which is called when one * of the tasks has succeeded, or all have failed. 
It receives the `err` and * `result` arguments of the last attempt at completing the `task`. Invoked with * (err, results). * @example * async.tryEach([ * function getDataFromFirstWebsite(callback) { * // Try getting the data from the first website * callback(err, data); * }, * function getDataFromSecondWebsite(callback) { * // First website failed, * // Try getting the data from the backup website * callback(err, data); * } * ], * // optional callback * function(err, results) { * Now do something with the data. * }); * */ function tryEach(tasks, callback) { var error = null; var result; callback = callback || noop; eachSeries(tasks, function(task, callback) { wrapAsync(task)(function (err, res/*, ...args*/) { if (arguments.length > 2) { result = slice(arguments, 1); } else { result = res; } error = err; callback(!err); }); }, function () { callback(error, result); }); } /** * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, * unmemoized form. Handy for testing. * * @name unmemoize * @static * @memberOf module:Utils * @method * @see [async.memoize]{@link module:Utils.memoize} * @category Util * @param {AsyncFunction} fn - the memoized function * @returns {AsyncFunction} a function that calls the original unmemoized function */ function unmemoize(fn) { return function () { return (fn.unmemoized || fn).apply(null, arguments); }; } /** * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when * stopped, or an error occurs. * * @name whilst * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Function} test - synchronous truth test to perform before each * execution of `iteratee`. Invoked with (). * @param {AsyncFunction} iteratee - An async function which is called each time * `test` passes. Invoked with (callback). * @param {Function} [callback] - A callback which is called after the test * function has failed and repeated execution of `iteratee` has stopped. `callback` * will be passed an error and any arguments passed to the final `iteratee`'s * callback. Invoked with (err, [results]); * @returns undefined * @example * * var count = 0; * async.whilst( * function() { return count < 5; }, * function(callback) { * count++; * setTimeout(function() { * callback(null, count); * }, 1000); * }, * function (err, n) { * // 5 seconds have passed, n = 5 * } * ); */ function whilst(test, iteratee, callback) { callback = onlyOnce(callback || noop); var _iteratee = wrapAsync(iteratee); if (!test()) return callback(null); var next = function(err/*, ...args*/) { if (err) return callback(err); if (test()) return _iteratee(next); var args = slice(arguments, 1); callback.apply(null, [null].concat(args)); }; _iteratee(next); } /** * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when * stopped, or an error occurs. `callback` will be passed an error and any * arguments passed to the final `iteratee`'s callback. * * The inverse of [whilst]{@link module:ControlFlow.whilst}. * * @name until * @static * @memberOf module:ControlFlow * @method * @see [async.whilst]{@link module:ControlFlow.whilst} * @category Control Flow * @param {Function} test - synchronous truth test to perform before each * execution of `iteratee`. Invoked with (). * @param {AsyncFunction} iteratee - An async function which is called each time * `test` fails. Invoked with (callback). 
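 *
 * An illustrative sketch (not part of the original docs), mirroring the whilst()
 * example above with the test inverted:
 *
 * var count = 0;
 * async.until(
 *     function() { return count >= 5; },
 *     function(callback) {
 *         count++;
 *         setTimeout(function() { callback(null, count); }, 1000);
 *     },
 *     function (err, n) {
 *         // iteration stops once the test returns true; n is 5
 *     }
 * );
 *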
* @param {Function} [callback] - A callback which is called after the test * function has passed and repeated execution of `iteratee` has stopped. `callback` * will be passed an error and any arguments passed to the final `iteratee`'s * callback. Invoked with (err, [results]); */ function until(test, iteratee, callback) { whilst(function() { return !test.apply(this, arguments); }, iteratee, callback); } /** * Runs the `tasks` array of functions in series, each passing their results to * the next in the array. However, if any of the `tasks` pass an error to their * own callback, the next function is not executed, and the main `callback` is * immediately called with the error. * * @name waterfall * @static * @memberOf module:ControlFlow * @method * @category Control Flow * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} * to run. * Each function should complete with any number of `result` values. * The `result` values will be passed as arguments, in order, to the next task. * @param {Function} [callback] - An optional callback to run once all the * functions have completed. This will be passed the results of the last task's * callback. Invoked with (err, [results]). * @returns undefined * @example * * async.waterfall([ * function(callback) { * callback(null, 'one', 'two'); * }, * function(arg1, arg2, callback) { * // arg1 now equals 'one' and arg2 now equals 'two' * callback(null, 'three'); * }, * function(arg1, callback) { * // arg1 now equals 'three' * callback(null, 'done'); * } * ], function (err, result) { * // result now equals 'done' * }); * * // Or, with named functions: * async.waterfall([ * myFirstFunction, * mySecondFunction, * myLastFunction, * ], function (err, result) { * // result now equals 'done' * }); * function myFirstFunction(callback) { * callback(null, 'one', 'two'); * } * function mySecondFunction(arg1, arg2, callback) { * // arg1 now equals 'one' and arg2 now equals 'two' * callback(null, 'three'); * } * function myLastFunction(arg1, callback) { * // arg1 now equals 'three' * callback(null, 'done'); * } */ var waterfall = function(tasks, callback) { callback = once(callback || noop); if (!isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); if (!tasks.length) return callback(); var taskIndex = 0; function nextTask(args) { var task = wrapAsync(tasks[taskIndex++]); args.push(onlyOnce(next)); task.apply(null, args); } function next(err/*, ...args*/) { if (err || taskIndex === tasks.length) { return callback.apply(null, arguments); } nextTask(slice(arguments, 1)); } nextTask([]); }; /** * An "async function" in the context of Async is an asynchronous function with * a variable number of parameters, with the final parameter being a callback. * (`function (arg1, arg2, ..., callback) {}`) * The final callback is of the form `callback(err, results...)`, which must be * called once the function is completed. The callback should be called with a * Error as its first argument to signal that an error occurred. * Otherwise, if no error occurred, it should be called with `null` as the first * argument, and any additional `result` arguments that may apply, to signal * successful completion. * The callback must be called exactly once, ideally on a later tick of the * JavaScript event loop. * * This type of function is also referred to as a "Node-style async function", * or a "continuation passing-style function" (CPS). 
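 *
 * For example, a minimal Node-style async function might look like this (an
 * illustrative sketch, not part of the original docs):
 *
 * function double(n, callback) {
 *     // the callback is the final parameter and is called exactly once
 *     setTimeout(function() { callback(null, n * 2); }, 0);
 * }
 *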
Most of the methods of this * library are themselves CPS/Node-style async functions, or functions that * return CPS/Node-style async functions. * * Wherever we accept a Node-style async function, we also directly accept an * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. * In this case, the `async` function will not be passed a final callback * argument, and any thrown error will be used as the `err` argument of the * implicit callback, and the return value will be used as the `result` value. * (i.e. a `rejected` value of the returned Promise becomes the `err` callback * argument, and a `resolved` value becomes the `result`.) * * Note that, due to JavaScript limitations, we can only detect native `async` * functions and not transpiled implementations. * Your environment must have `async`/`await` support for this to work. * (e.g. Node > v7.6, or a recent version of a modern browser). * If you are using `async` functions through a transpiler (e.g. Babel), you * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, * because the `async function` will be compiled to an ordinary function that * returns a promise. * * @typedef {Function} AsyncFunction * @static */ /** * Async is a utility module which provides straightforward, powerful functions * for working with asynchronous JavaScript. Although originally designed for * use with [Node.js](http://nodejs.org) and installable via * `npm install --save async`, it can also be used directly in the browser. * @module async * @see AsyncFunction */ /** * A collection of `async` functions for manipulating collections, such as * arrays and objects. * @module Collections */ /** * A collection of `async` functions for controlling the flow through a script. * @module ControlFlow */ /** * A collection of `async` utility functions.
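 * For instance, asyncify (a.k.a. wrapSync) adapts a plain value- or promise-returning
 * function to the callback convention described above. An illustrative sketch, not part
 * of the original docs; a native Promise implementation is assumed:
 *
 * var fetchAnswer = async.asyncify(function () {
 *     return Promise.resolve(42);
 * });
 * fetchAnswer(function (err, answer) {
 *     // answer is 42; a rejected promise would arrive as err instead
 * });
 *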
* @module Utils */ var index = { apply: apply, applyEach: applyEach, applyEachSeries: applyEachSeries, asyncify: asyncify, auto: auto, autoInject: autoInject, cargo: cargo, compose: compose, concat: concat, concatLimit: concatLimit, concatSeries: concatSeries, constant: constant, detect: detect, detectLimit: detectLimit, detectSeries: detectSeries, dir: dir, doDuring: doDuring, doUntil: doUntil, doWhilst: doWhilst, during: during, each: eachLimit, eachLimit: eachLimit$1, eachOf: eachOf, eachOfLimit: eachOfLimit, eachOfSeries: eachOfSeries, eachSeries: eachSeries, ensureAsync: ensureAsync, every: every, everyLimit: everyLimit, everySeries: everySeries, filter: filter, filterLimit: filterLimit, filterSeries: filterSeries, forever: forever, groupBy: groupBy, groupByLimit: groupByLimit, groupBySeries: groupBySeries, log: log, map: map, mapLimit: mapLimit, mapSeries: mapSeries, mapValues: mapValues, mapValuesLimit: mapValuesLimit, mapValuesSeries: mapValuesSeries, memoize: memoize, nextTick: nextTick, parallel: parallelLimit, parallelLimit: parallelLimit$1, priorityQueue: priorityQueue, queue: queue$1, race: race, reduce: reduce, reduceRight: reduceRight, reflect: reflect, reflectAll: reflectAll, reject: reject, rejectLimit: rejectLimit, rejectSeries: rejectSeries, retry: retry, retryable: retryable, seq: seq, series: series, setImmediate: setImmediate$1, some: some, someLimit: someLimit, someSeries: someSeries, sortBy: sortBy, timeout: timeout, times: times, timesLimit: timeLimit, timesSeries: timesSeries, transform: transform, tryEach: tryEach, unmemoize: unmemoize, until: until, waterfall: waterfall, whilst: whilst, // aliases all: every, allLimit: everyLimit, allSeries: everySeries, any: some, anyLimit: someLimit, anySeries: someSeries, find: detect, findLimit: detectLimit, findSeries: detectSeries, forEach: eachLimit, forEachSeries: eachSeries, forEachLimit: eachLimit$1, forEachOf: eachOf, forEachOfSeries: eachOfSeries, forEachOfLimit: eachOfLimit, inject: reduce, foldl: reduce, foldr: reduceRight, select: filter, selectLimit: filterLimit, selectSeries: filterSeries, wrapSync: asyncify }; exports['default'] = index; exports.apply = apply; exports.applyEach = applyEach; exports.applyEachSeries = applyEachSeries; exports.asyncify = asyncify; exports.auto = auto; exports.autoInject = autoInject; exports.cargo = cargo; exports.compose = compose; exports.concat = concat; exports.concatLimit = concatLimit; exports.concatSeries = concatSeries; exports.constant = constant; exports.detect = detect; exports.detectLimit = detectLimit; exports.detectSeries = detectSeries; exports.dir = dir; exports.doDuring = doDuring; exports.doUntil = doUntil; exports.doWhilst = doWhilst; exports.during = during; exports.each = eachLimit; exports.eachLimit = eachLimit$1; exports.eachOf = eachOf; exports.eachOfLimit = eachOfLimit; exports.eachOfSeries = eachOfSeries; exports.eachSeries = eachSeries; exports.ensureAsync = ensureAsync; exports.every = every; exports.everyLimit = everyLimit; exports.everySeries = everySeries; exports.filter = filter; exports.filterLimit = filterLimit; exports.filterSeries = filterSeries; exports.forever = forever; exports.groupBy = groupBy; exports.groupByLimit = groupByLimit; exports.groupBySeries = groupBySeries; exports.log = log; exports.map = map; exports.mapLimit = mapLimit; exports.mapSeries = mapSeries; exports.mapValues = mapValues; exports.mapValuesLimit = mapValuesLimit; exports.mapValuesSeries = mapValuesSeries; exports.memoize = memoize; exports.nextTick = nextTick; 
exports.parallel = parallelLimit; exports.parallelLimit = parallelLimit$1; exports.priorityQueue = priorityQueue; exports.queue = queue$1; exports.race = race; exports.reduce = reduce; exports.reduceRight = reduceRight; exports.reflect = reflect; exports.reflectAll = reflectAll; exports.reject = reject; exports.rejectLimit = rejectLimit; exports.rejectSeries = rejectSeries; exports.retry = retry; exports.retryable = retryable; exports.seq = seq; exports.series = series; exports.setImmediate = setImmediate$1; exports.some = some; exports.someLimit = someLimit; exports.someSeries = someSeries; exports.sortBy = sortBy; exports.timeout = timeout; exports.times = times; exports.timesLimit = timeLimit; exports.timesSeries = timesSeries; exports.transform = transform; exports.tryEach = tryEach; exports.unmemoize = unmemoize; exports.until = until; exports.waterfall = waterfall; exports.whilst = whilst; exports.all = every; exports.allLimit = everyLimit; exports.allSeries = everySeries; exports.any = some; exports.anyLimit = someLimit; exports.anySeries = someSeries; exports.find = detect; exports.findLimit = detectLimit; exports.findSeries = detectSeries; exports.forEach = eachLimit; exports.forEachSeries = eachSeries; exports.forEachLimit = eachLimit$1; exports.forEachOf = eachOf; exports.forEachOfSeries = eachOfSeries; exports.forEachOfLimit = eachOfLimit; exports.inject = reduce; exports.foldl = reduce; exports.foldr = reduceRight; exports.select = filter; exports.selectLimit = filterLimit; exports.selectSeries = filterSeries; exports.wrapSync = asyncify; Object.defineProperty(exports, '__esModule', { value: true }); }))); }).call(this,_dereq_('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {}) },{"_process":95}],44:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./evpkdf"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var BlockCipher = C_lib.BlockCipher; var C_algo = C.algo; // Lookup tables var SBOX = []; var INV_SBOX = []; var SUB_MIX_0 = []; var SUB_MIX_1 = []; var SUB_MIX_2 = []; var SUB_MIX_3 = []; var INV_SUB_MIX_0 = []; var INV_SUB_MIX_1 = []; var INV_SUB_MIX_2 = []; var INV_SUB_MIX_3 = []; // Compute lookup tables (function () { // Compute double table var d = []; for (var i = 0; i < 256; i++) { if (i < 128) { d[i] = i << 1; } else { d[i] = (i << 1) ^ 0x11b; } } // Walk GF(2^8) var x = 0; var xi = 0; for (var i = 0; i < 256; i++) { // Compute sbox var sx = xi ^ (xi << 1) ^ (xi << 2) ^ (xi << 3) ^ (xi << 4); sx = (sx >>> 8) ^ (sx & 0xff) ^ 0x63; SBOX[x] = sx; INV_SBOX[sx] = x; // Compute multiplication var x2 = d[x]; var x4 = d[x2]; var x8 = d[x4]; // Compute sub bytes, mix columns tables var t = (d[sx] * 0x101) ^ (sx * 0x1010100); SUB_MIX_0[x] = (t << 24) | (t >>> 8); SUB_MIX_1[x] = (t << 16) | (t >>> 16); SUB_MIX_2[x] = (t << 8) | (t >>> 24); SUB_MIX_3[x] = t; // Compute inv sub bytes, inv mix columns tables var t = (x8 * 0x1010101) ^ (x4 * 0x10001) ^ (x2 * 0x101) ^ (x * 0x1010100); INV_SUB_MIX_0[sx] = (t << 24) | (t >>> 8); INV_SUB_MIX_1[sx] = (t << 16) | (t >>> 16); INV_SUB_MIX_2[sx] = (t << 8) | (t >>> 24); INV_SUB_MIX_3[sx] = t; // Compute next counter if (!x) { x = xi = 1; } else { x = x2 ^ d[d[d[x8 ^ x2]]]; xi ^= d[d[xi]]; } } }()); // Precomputed Rcon lookup var RCON = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36]; /** * AES block cipher algorithm. 
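 *
 * An illustrative usage sketch (not part of the original docs), relying only on the
 * password-based CryptoJS.AES helper that this module wires up a little further below:
 *
 * var encrypted = CryptoJS.AES.encrypt('secret message', 'passphrase');
 * var decrypted = CryptoJS.AES.decrypt(encrypted, 'passphrase');
 * // decrypted.toString(CryptoJS.enc.Utf8) === 'secret message'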
*/ var AES = C_algo.AES = BlockCipher.extend({ _doReset: function () { // Skip reset of nRounds has been set before and key did not change if (this._nRounds && this._keyPriorReset === this._key) { return; } // Shortcuts var key = this._keyPriorReset = this._key; var keyWords = key.words; var keySize = key.sigBytes / 4; // Compute number of rounds var nRounds = this._nRounds = keySize + 6; // Compute number of key schedule rows var ksRows = (nRounds + 1) * 4; // Compute key schedule var keySchedule = this._keySchedule = []; for (var ksRow = 0; ksRow < ksRows; ksRow++) { if (ksRow < keySize) { keySchedule[ksRow] = keyWords[ksRow]; } else { var t = keySchedule[ksRow - 1]; if (!(ksRow % keySize)) { // Rot word t = (t << 8) | (t >>> 24); // Sub word t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; // Mix Rcon t ^= RCON[(ksRow / keySize) | 0] << 24; } else if (keySize > 6 && ksRow % keySize == 4) { // Sub word t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; } keySchedule[ksRow] = keySchedule[ksRow - keySize] ^ t; } } // Compute inv key schedule var invKeySchedule = this._invKeySchedule = []; for (var invKsRow = 0; invKsRow < ksRows; invKsRow++) { var ksRow = ksRows - invKsRow; if (invKsRow % 4) { var t = keySchedule[ksRow]; } else { var t = keySchedule[ksRow - 4]; } if (invKsRow < 4 || ksRow <= 4) { invKeySchedule[invKsRow] = t; } else { invKeySchedule[invKsRow] = INV_SUB_MIX_0[SBOX[t >>> 24]] ^ INV_SUB_MIX_1[SBOX[(t >>> 16) & 0xff]] ^ INV_SUB_MIX_2[SBOX[(t >>> 8) & 0xff]] ^ INV_SUB_MIX_3[SBOX[t & 0xff]]; } } }, encryptBlock: function (M, offset) { this._doCryptBlock(M, offset, this._keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX); }, decryptBlock: function (M, offset) { // Swap 2nd and 4th rows var t = M[offset + 1]; M[offset + 1] = M[offset + 3]; M[offset + 3] = t; this._doCryptBlock(M, offset, this._invKeySchedule, INV_SUB_MIX_0, INV_SUB_MIX_1, INV_SUB_MIX_2, INV_SUB_MIX_3, INV_SBOX); // Inv swap 2nd and 4th rows var t = M[offset + 1]; M[offset + 1] = M[offset + 3]; M[offset + 3] = t; }, _doCryptBlock: function (M, offset, keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX) { // Shortcut var nRounds = this._nRounds; // Get input, add round key var s0 = M[offset] ^ keySchedule[0]; var s1 = M[offset + 1] ^ keySchedule[1]; var s2 = M[offset + 2] ^ keySchedule[2]; var s3 = M[offset + 3] ^ keySchedule[3]; // Key schedule row counter var ksRow = 4; // Rounds for (var round = 1; round < nRounds; round++) { // Shift rows, sub bytes, mix columns, add round key var t0 = SUB_MIX_0[s0 >>> 24] ^ SUB_MIX_1[(s1 >>> 16) & 0xff] ^ SUB_MIX_2[(s2 >>> 8) & 0xff] ^ SUB_MIX_3[s3 & 0xff] ^ keySchedule[ksRow++]; var t1 = SUB_MIX_0[s1 >>> 24] ^ SUB_MIX_1[(s2 >>> 16) & 0xff] ^ SUB_MIX_2[(s3 >>> 8) & 0xff] ^ SUB_MIX_3[s0 & 0xff] ^ keySchedule[ksRow++]; var t2 = SUB_MIX_0[s2 >>> 24] ^ SUB_MIX_1[(s3 >>> 16) & 0xff] ^ SUB_MIX_2[(s0 >>> 8) & 0xff] ^ SUB_MIX_3[s1 & 0xff] ^ keySchedule[ksRow++]; var t3 = SUB_MIX_0[s3 >>> 24] ^ SUB_MIX_1[(s0 >>> 16) & 0xff] ^ SUB_MIX_2[(s1 >>> 8) & 0xff] ^ SUB_MIX_3[s2 & 0xff] ^ keySchedule[ksRow++]; // Update state s0 = t0; s1 = t1; s2 = t2; s3 = t3; } // Shift rows, sub bytes, add round key var t0 = ((SBOX[s0 >>> 24] << 24) | (SBOX[(s1 >>> 16) & 0xff] << 16) | (SBOX[(s2 >>> 8) & 0xff] << 8) | SBOX[s3 & 0xff]) ^ keySchedule[ksRow++]; var t1 = ((SBOX[s1 >>> 24] << 24) | (SBOX[(s2 >>> 16) & 0xff] << 16) | (SBOX[(s3 >>> 8) & 0xff] << 8) | SBOX[s0 
& 0xff]) ^ keySchedule[ksRow++]; var t2 = ((SBOX[s2 >>> 24] << 24) | (SBOX[(s3 >>> 16) & 0xff] << 16) | (SBOX[(s0 >>> 8) & 0xff] << 8) | SBOX[s1 & 0xff]) ^ keySchedule[ksRow++]; var t3 = ((SBOX[s3 >>> 24] << 24) | (SBOX[(s0 >>> 16) & 0xff] << 16) | (SBOX[(s1 >>> 8) & 0xff] << 8) | SBOX[s2 & 0xff]) ^ keySchedule[ksRow++]; // Set output M[offset] = t0; M[offset + 1] = t1; M[offset + 2] = t2; M[offset + 3] = t3; }, keySize: 256/32 }); /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.AES.encrypt(message, key, cfg); * var plaintext = CryptoJS.AES.decrypt(ciphertext, key, cfg); */ C.AES = BlockCipher._createHelper(AES); }()); return CryptoJS.AES; })); },{"./cipher-core":45,"./core":46,"./enc-base64":47,"./evpkdf":49,"./md5":54}],45:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Cipher core components. */ CryptoJS.lib.Cipher || (function (undefined) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Base = C_lib.Base; var WordArray = C_lib.WordArray; var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm; var C_enc = C.enc; var Utf8 = C_enc.Utf8; var Base64 = C_enc.Base64; var C_algo = C.algo; var EvpKDF = C_algo.EvpKDF; /** * Abstract base cipher template. * * @property {number} keySize This cipher's key size. Default: 4 (128 bits) * @property {number} ivSize This cipher's IV size. Default: 4 (128 bits) * @property {number} _ENC_XFORM_MODE A constant representing encryption mode. * @property {number} _DEC_XFORM_MODE A constant representing decryption mode. */ var Cipher = C_lib.Cipher = BufferedBlockAlgorithm.extend({ /** * Configuration options. * * @property {WordArray} iv The IV to use for this operation. */ cfg: Base.extend(), /** * Creates this cipher in encryption mode. * * @param {WordArray} key The key. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {Cipher} A cipher instance. * * @static * * @example * * var cipher = CryptoJS.algo.AES.createEncryptor(keyWordArray, { iv: ivWordArray }); */ createEncryptor: function (key, cfg) { return this.create(this._ENC_XFORM_MODE, key, cfg); }, /** * Creates this cipher in decryption mode. * * @param {WordArray} key The key. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {Cipher} A cipher instance. * * @static * * @example * * var cipher = CryptoJS.algo.AES.createDecryptor(keyWordArray, { iv: ivWordArray }); */ createDecryptor: function (key, cfg) { return this.create(this._DEC_XFORM_MODE, key, cfg); }, /** * Initializes a newly created cipher. * * @param {number} xformMode Either the encryption or decryption transormation mode constant. * @param {WordArray} key The key. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @example * * var cipher = CryptoJS.algo.AES.create(CryptoJS.algo.AES._ENC_XFORM_MODE, keyWordArray, { iv: ivWordArray }); */ init: function (xformMode, key, cfg) { // Apply config defaults this.cfg = this.cfg.extend(cfg); // Store transform mode and key this._xformMode = xformMode; this._key = key; // Set initial values this.reset(); }, /** * Resets this cipher to its initial state. 
* * @example * * cipher.reset(); */ reset: function () { // Reset data buffer BufferedBlockAlgorithm.reset.call(this); // Perform concrete-cipher logic this._doReset(); }, /** * Adds data to be encrypted or decrypted. * * @param {WordArray|string} dataUpdate The data to encrypt or decrypt. * * @return {WordArray} The data after processing. * * @example * * var encrypted = cipher.process('data'); * var encrypted = cipher.process(wordArray); */ process: function (dataUpdate) { // Append this._append(dataUpdate); // Process available blocks return this._process(); }, /** * Finalizes the encryption or decryption process. * Note that the finalize operation is effectively a destructive, read-once operation. * * @param {WordArray|string} dataUpdate The final data to encrypt or decrypt. * * @return {WordArray} The data after final processing. * * @example * * var encrypted = cipher.finalize(); * var encrypted = cipher.finalize('data'); * var encrypted = cipher.finalize(wordArray); */ finalize: function (dataUpdate) { // Final data update if (dataUpdate) { this._append(dataUpdate); } // Perform concrete-cipher logic var finalProcessedData = this._doFinalize(); return finalProcessedData; }, keySize: 128/32, ivSize: 128/32, _ENC_XFORM_MODE: 1, _DEC_XFORM_MODE: 2, /** * Creates shortcut functions to a cipher's object interface. * * @param {Cipher} cipher The cipher to create a helper for. * * @return {Object} An object with encrypt and decrypt shortcut functions. * * @static * * @example * * var AES = CryptoJS.lib.Cipher._createHelper(CryptoJS.algo.AES); */ _createHelper: (function () { function selectCipherStrategy(key) { if (typeof key == 'string') { return PasswordBasedCipher; } else { return SerializableCipher; } } return function (cipher) { return { encrypt: function (message, key, cfg) { return selectCipherStrategy(key).encrypt(cipher, message, key, cfg); }, decrypt: function (ciphertext, key, cfg) { return selectCipherStrategy(key).decrypt(cipher, ciphertext, key, cfg); } }; }; }()) }); /** * Abstract base stream cipher template. * * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 1 (32 bits) */ var StreamCipher = C_lib.StreamCipher = Cipher.extend({ _doFinalize: function () { // Process partial blocks var finalProcessedBlocks = this._process(!!'flush'); return finalProcessedBlocks; }, blockSize: 1 }); /** * Mode namespace. */ var C_mode = C.mode = {}; /** * Abstract base block cipher mode template. */ var BlockCipherMode = C_lib.BlockCipherMode = Base.extend({ /** * Creates this mode for encryption. * * @param {Cipher} cipher A block cipher instance. * @param {Array} iv The IV words. * * @static * * @example * * var mode = CryptoJS.mode.CBC.createEncryptor(cipher, iv.words); */ createEncryptor: function (cipher, iv) { return this.Encryptor.create(cipher, iv); }, /** * Creates this mode for decryption. * * @param {Cipher} cipher A block cipher instance. * @param {Array} iv The IV words. * * @static * * @example * * var mode = CryptoJS.mode.CBC.createDecryptor(cipher, iv.words); */ createDecryptor: function (cipher, iv) { return this.Decryptor.create(cipher, iv); }, /** * Initializes a newly created mode. * * @param {Cipher} cipher A block cipher instance. * @param {Array} iv The IV words. * * @example * * var mode = CryptoJS.mode.CBC.Encryptor.create(cipher, iv.words); */ init: function (cipher, iv) { this._cipher = cipher; this._iv = iv; } }); /** * Cipher Block Chaining mode. */ var CBC = C_mode.CBC = (function () { /** * Abstract base CBC mode. 
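 *
 * Illustrative note (not part of the original docs): block ciphers pick this mode up
 * from their cfg, and CBC is also the default mode configured on BlockCipher below.
 * Assuming `key` and `iv` are WordArrays (e.g. built with CryptoJS.enc.Hex.parse,
 * defined later in this bundle), a caller could request it explicitly with:
 *
 * var encrypted = CryptoJS.AES.encrypt('msg', key, { iv: iv, mode: CryptoJS.mode.CBC });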
*/ var CBC = BlockCipherMode.extend(); /** * CBC encryptor. */ CBC.Encryptor = CBC.extend({ /** * Processes the data block at offset. * * @param {Array} words The data words to operate on. * @param {number} offset The offset where the block starts. * * @example * * mode.processBlock(data.words, offset); */ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher; var blockSize = cipher.blockSize; // XOR and encrypt xorBlock.call(this, words, offset, blockSize); cipher.encryptBlock(words, offset); // Remember this block to use with next block this._prevBlock = words.slice(offset, offset + blockSize); } }); /** * CBC decryptor. */ CBC.Decryptor = CBC.extend({ /** * Processes the data block at offset. * * @param {Array} words The data words to operate on. * @param {number} offset The offset where the block starts. * * @example * * mode.processBlock(data.words, offset); */ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher; var blockSize = cipher.blockSize; // Remember this block to use with next block var thisBlock = words.slice(offset, offset + blockSize); // Decrypt and XOR cipher.decryptBlock(words, offset); xorBlock.call(this, words, offset, blockSize); // This block becomes the previous block this._prevBlock = thisBlock; } }); function xorBlock(words, offset, blockSize) { // Shortcut var iv = this._iv; // Choose mixing block if (iv) { var block = iv; // Remove IV for subsequent blocks this._iv = undefined; } else { var block = this._prevBlock; } // XOR blocks for (var i = 0; i < blockSize; i++) { words[offset + i] ^= block[i]; } } return CBC; }()); /** * Padding namespace. */ var C_pad = C.pad = {}; /** * PKCS #5/7 padding strategy. */ var Pkcs7 = C_pad.Pkcs7 = { /** * Pads data using the algorithm defined in PKCS #5/7. * * @param {WordArray} data The data to pad. * @param {number} blockSize The multiple that the data should be padded to. * * @static * * @example * * CryptoJS.pad.Pkcs7.pad(wordArray, 4); */ pad: function (data, blockSize) { // Shortcut var blockSizeBytes = blockSize * 4; // Count padding bytes var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes; // Create padding word var paddingWord = (nPaddingBytes << 24) | (nPaddingBytes << 16) | (nPaddingBytes << 8) | nPaddingBytes; // Create padding var paddingWords = []; for (var i = 0; i < nPaddingBytes; i += 4) { paddingWords.push(paddingWord); } var padding = WordArray.create(paddingWords, nPaddingBytes); // Add padding data.concat(padding); }, /** * Unpads data that had been padded using the algorithm defined in PKCS #5/7. * * @param {WordArray} data The data to unpad. * * @static * * @example * * CryptoJS.pad.Pkcs7.unpad(wordArray); */ unpad: function (data) { // Get number of padding bytes from last byte var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; // Remove padding data.sigBytes -= nPaddingBytes; } }; /** * Abstract base block cipher template. * * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 4 (128 bits) */ var BlockCipher = C_lib.BlockCipher = Cipher.extend({ /** * Configuration options. * * @property {Mode} mode The block mode to use. Default: CBC * @property {Padding} padding The padding strategy to use. 
Default: Pkcs7 */ cfg: Cipher.cfg.extend({ mode: CBC, padding: Pkcs7 }), reset: function () { // Reset cipher Cipher.reset.call(this); // Shortcuts var cfg = this.cfg; var iv = cfg.iv; var mode = cfg.mode; // Reset block mode if (this._xformMode == this._ENC_XFORM_MODE) { var modeCreator = mode.createEncryptor; } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { var modeCreator = mode.createDecryptor; // Keep at least one block in the buffer for unpadding this._minBufferSize = 1; } this._mode = modeCreator.call(mode, this, iv && iv.words); }, _doProcessBlock: function (words, offset) { this._mode.processBlock(words, offset); }, _doFinalize: function () { // Shortcut var padding = this.cfg.padding; // Finalize if (this._xformMode == this._ENC_XFORM_MODE) { // Pad data padding.pad(this._data, this.blockSize); // Process final blocks var finalProcessedBlocks = this._process(!!'flush'); } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { // Process final blocks var finalProcessedBlocks = this._process(!!'flush'); // Unpad data padding.unpad(finalProcessedBlocks); } return finalProcessedBlocks; }, blockSize: 128/32 }); /** * A collection of cipher parameters. * * @property {WordArray} ciphertext The raw ciphertext. * @property {WordArray} key The key to this ciphertext. * @property {WordArray} iv The IV used in the ciphering operation. * @property {WordArray} salt The salt used with a key derivation function. * @property {Cipher} algorithm The cipher algorithm. * @property {Mode} mode The block mode used in the ciphering operation. * @property {Padding} padding The padding scheme used in the ciphering operation. * @property {number} blockSize The block size of the cipher. * @property {Format} formatter The default formatting strategy to convert this cipher params object to a string. */ var CipherParams = C_lib.CipherParams = Base.extend({ /** * Initializes a newly created cipher params object. * * @param {Object} cipherParams An object with any of the possible cipher parameters. * * @example * * var cipherParams = CryptoJS.lib.CipherParams.create({ * ciphertext: ciphertextWordArray, * key: keyWordArray, * iv: ivWordArray, * salt: saltWordArray, * algorithm: CryptoJS.algo.AES, * mode: CryptoJS.mode.CBC, * padding: CryptoJS.pad.PKCS7, * blockSize: 4, * formatter: CryptoJS.format.OpenSSL * }); */ init: function (cipherParams) { this.mixIn(cipherParams); }, /** * Converts this cipher params object to a string. * * @param {Format} formatter (Optional) The formatting strategy to use. * * @return {string} The stringified cipher params. * * @throws Error If neither the formatter nor the default formatter is set. * * @example * * var string = cipherParams + ''; * var string = cipherParams.toString(); * var string = cipherParams.toString(CryptoJS.format.OpenSSL); */ toString: function (formatter) { return (formatter || this.formatter).stringify(this); } }); /** * Format namespace. */ var C_format = C.format = {}; /** * OpenSSL formatting strategy. */ var OpenSSLFormatter = C_format.OpenSSL = { /** * Converts a cipher params object to an OpenSSL-compatible string. * * @param {CipherParams} cipherParams The cipher params object. * * @return {string} The OpenSSL-compatible string. 
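 * When a salt is present this is the base64 encoding of "Salted__" + salt + ciphertext;
 * the magic words 0x53616c74 and 0x65645f5f used in the code below are simply the ASCII
 * bytes of "Salted__", matching OpenSSL's salted output format.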
* * @static * * @example * * var openSSLString = CryptoJS.format.OpenSSL.stringify(cipherParams); */ stringify: function (cipherParams) { // Shortcuts var ciphertext = cipherParams.ciphertext; var salt = cipherParams.salt; // Format if (salt) { var wordArray = WordArray.create([0x53616c74, 0x65645f5f]).concat(salt).concat(ciphertext); } else { var wordArray = ciphertext; } return wordArray.toString(Base64); }, /** * Converts an OpenSSL-compatible string to a cipher params object. * * @param {string} openSSLStr The OpenSSL-compatible string. * * @return {CipherParams} The cipher params object. * * @static * * @example * * var cipherParams = CryptoJS.format.OpenSSL.parse(openSSLString); */ parse: function (openSSLStr) { // Parse base64 var ciphertext = Base64.parse(openSSLStr); // Shortcut var ciphertextWords = ciphertext.words; // Test for salt if (ciphertextWords[0] == 0x53616c74 && ciphertextWords[1] == 0x65645f5f) { // Extract salt var salt = WordArray.create(ciphertextWords.slice(2, 4)); // Remove salt from ciphertext ciphertextWords.splice(0, 4); ciphertext.sigBytes -= 16; } return CipherParams.create({ ciphertext: ciphertext, salt: salt }); } }; /** * A cipher wrapper that returns ciphertext as a serializable cipher params object. */ var SerializableCipher = C_lib.SerializableCipher = Base.extend({ /** * Configuration options. * * @property {Formatter} format The formatting strategy to convert cipher param objects to and from a string. Default: OpenSSL */ cfg: Base.extend({ format: OpenSSLFormatter }), /** * Encrypts a message. * * @param {Cipher} cipher The cipher algorithm to use. * @param {WordArray|string} message The message to encrypt. * @param {WordArray} key The key. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {CipherParams} A cipher params object. * * @static * * @example * * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key); * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv }); * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv, format: CryptoJS.format.OpenSSL }); */ encrypt: function (cipher, message, key, cfg) { // Apply config defaults cfg = this.cfg.extend(cfg); // Encrypt var encryptor = cipher.createEncryptor(key, cfg); var ciphertext = encryptor.finalize(message); // Shortcut var cipherCfg = encryptor.cfg; // Create and return serializable cipher params return CipherParams.create({ ciphertext: ciphertext, key: key, iv: cipherCfg.iv, algorithm: cipher, mode: cipherCfg.mode, padding: cipherCfg.padding, blockSize: cipher.blockSize, formatter: cfg.format }); }, /** * Decrypts serialized ciphertext. * * @param {Cipher} cipher The cipher algorithm to use. * @param {CipherParams|string} ciphertext The ciphertext to decrypt. * @param {WordArray} key The key. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {WordArray} The plaintext. 
* * @static * * @example * * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, key, { iv: iv, format: CryptoJS.format.OpenSSL }); * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, key, { iv: iv, format: CryptoJS.format.OpenSSL }); */ decrypt: function (cipher, ciphertext, key, cfg) { // Apply config defaults cfg = this.cfg.extend(cfg); // Convert string to CipherParams ciphertext = this._parse(ciphertext, cfg.format); // Decrypt var plaintext = cipher.createDecryptor(key, cfg).finalize(ciphertext.ciphertext); return plaintext; }, /** * Converts serialized ciphertext to CipherParams, * else assumed CipherParams already and returns ciphertext unchanged. * * @param {CipherParams|string} ciphertext The ciphertext. * @param {Formatter} format The formatting strategy to use to parse serialized ciphertext. * * @return {CipherParams} The unserialized ciphertext. * * @static * * @example * * var ciphertextParams = CryptoJS.lib.SerializableCipher._parse(ciphertextStringOrParams, format); */ _parse: function (ciphertext, format) { if (typeof ciphertext == 'string') { return format.parse(ciphertext, this); } else { return ciphertext; } } }); /** * Key derivation function namespace. */ var C_kdf = C.kdf = {}; /** * OpenSSL key derivation function. */ var OpenSSLKdf = C_kdf.OpenSSL = { /** * Derives a key and IV from a password. * * @param {string} password The password to derive from. * @param {number} keySize The size in words of the key to generate. * @param {number} ivSize The size in words of the IV to generate. * @param {WordArray|string} salt (Optional) A 64-bit salt to use. If omitted, a salt will be generated randomly. * * @return {CipherParams} A cipher params object with the key, IV, and salt. * * @static * * @example * * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32); * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32, 'saltsalt'); */ execute: function (password, keySize, ivSize, salt) { // Generate random salt if (!salt) { salt = WordArray.random(64/8); } // Derive key and IV var key = EvpKDF.create({ keySize: keySize + ivSize }).compute(password, salt); // Separate key and IV var iv = WordArray.create(key.words.slice(keySize), ivSize * 4); key.sigBytes = keySize * 4; // Return params return CipherParams.create({ key: key, iv: iv, salt: salt }); } }; /** * A serializable cipher wrapper that derives the key from a password, * and returns ciphertext as a serializable cipher params object. */ var PasswordBasedCipher = C_lib.PasswordBasedCipher = SerializableCipher.extend({ /** * Configuration options. * * @property {KDF} kdf The key derivation function to use to generate a key and IV from a password. Default: OpenSSL */ cfg: SerializableCipher.cfg.extend({ kdf: OpenSSLKdf }), /** * Encrypts a message using a password. * * @param {Cipher} cipher The cipher algorithm to use. * @param {WordArray|string} message The message to encrypt. * @param {string} password The password. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {CipherParams} A cipher params object. 
* * @static * * @example * * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password'); * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password', { format: CryptoJS.format.OpenSSL }); */ encrypt: function (cipher, message, password, cfg) { // Apply config defaults cfg = this.cfg.extend(cfg); // Derive key and other params var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize); // Add IV to config cfg.iv = derivedParams.iv; // Encrypt var ciphertext = SerializableCipher.encrypt.call(this, cipher, message, derivedParams.key, cfg); // Mix in derived params ciphertext.mixIn(derivedParams); return ciphertext; }, /** * Decrypts serialized ciphertext using a password. * * @param {Cipher} cipher The cipher algorithm to use. * @param {CipherParams|string} ciphertext The ciphertext to decrypt. * @param {string} password The password. * @param {Object} cfg (Optional) The configuration options to use for this operation. * * @return {WordArray} The plaintext. * * @static * * @example * * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, 'password', { format: CryptoJS.format.OpenSSL }); * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, 'password', { format: CryptoJS.format.OpenSSL }); */ decrypt: function (cipher, ciphertext, password, cfg) { // Apply config defaults cfg = this.cfg.extend(cfg); // Convert string to CipherParams ciphertext = this._parse(ciphertext, cfg.format); // Derive key and other params var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize, ciphertext.salt); // Add IV to config cfg.iv = derivedParams.iv; // Decrypt var plaintext = SerializableCipher.decrypt.call(this, cipher, ciphertext, derivedParams.key, cfg); return plaintext; } }); }()); })); },{"./core":46}],46:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(); } else if (typeof define === "function" && define.amd) { // AMD define([], factory); } else { // Global (browser) root.CryptoJS = factory(); } }(this, function () { /** * CryptoJS core components. */ var CryptoJS = CryptoJS || (function (Math, undefined) { /* * Local polyfil of Object.create */ var create = Object.create || (function () { function F() {}; return function (obj) { var subtype; F.prototype = obj; subtype = new F(); F.prototype = null; return subtype; }; }()) /** * CryptoJS namespace. */ var C = {}; /** * Library namespace. */ var C_lib = C.lib = {}; /** * Base object for prototypal inheritance. */ var Base = C_lib.Base = (function () { return { /** * Creates a new object that inherits from this object. * * @param {Object} overrides Properties to copy into the new object. * * @return {Object} The new object. * * @static * * @example * * var MyType = CryptoJS.lib.Base.extend({ * field: 'value', * * method: function () { * } * }); */ extend: function (overrides) { // Spawn var subtype = create(this); // Augment if (overrides) { subtype.mixIn(overrides); } // Create default initializer if (!subtype.hasOwnProperty('init') || this.init === subtype.init) { subtype.init = function () { subtype.$super.init.apply(this, arguments); }; } // Initializer's prototype is the subtype object subtype.init.prototype = subtype; // Reference supertype subtype.$super = this; return subtype; }, /** * Extends this object and runs the init method. 
* Arguments to create() will be passed to init(). * * @return {Object} The new object. * * @static * * @example * * var instance = MyType.create(); */ create: function () { var instance = this.extend(); instance.init.apply(instance, arguments); return instance; }, /** * Initializes a newly created object. * Override this method to add some logic when your objects are created. * * @example * * var MyType = CryptoJS.lib.Base.extend({ * init: function () { * // ... * } * }); */ init: function () { }, /** * Copies properties into this object. * * @param {Object} properties The properties to mix in. * * @example * * MyType.mixIn({ * field: 'value' * }); */ mixIn: function (properties) { for (var propertyName in properties) { if (properties.hasOwnProperty(propertyName)) { this[propertyName] = properties[propertyName]; } } // IE won't copy toString using the loop above if (properties.hasOwnProperty('toString')) { this.toString = properties.toString; } }, /** * Creates a copy of this object. * * @return {Object} The clone. * * @example * * var clone = instance.clone(); */ clone: function () { return this.init.prototype.extend(this); } }; }()); /** * An array of 32-bit words. * * @property {Array} words The array of 32-bit words. * @property {number} sigBytes The number of significant bytes in this word array. */ var WordArray = C_lib.WordArray = Base.extend({ /** * Initializes a newly created word array. * * @param {Array} words (Optional) An array of 32-bit words. * @param {number} sigBytes (Optional) The number of significant bytes in the words. * * @example * * var wordArray = CryptoJS.lib.WordArray.create(); * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]); * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6); */ init: function (words, sigBytes) { words = this.words = words || []; if (sigBytes != undefined) { this.sigBytes = sigBytes; } else { this.sigBytes = words.length * 4; } }, /** * Converts this word array to a string. * * @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex * * @return {string} The stringified word array. * * @example * * var string = wordArray + ''; * var string = wordArray.toString(); * var string = wordArray.toString(CryptoJS.enc.Utf8); */ toString: function (encoder) { return (encoder || Hex).stringify(this); }, /** * Concatenates a word array to this word array. * * @param {WordArray} wordArray The word array to append. * * @return {WordArray} This word array. * * @example * * wordArray1.concat(wordArray2); */ concat: function (wordArray) { // Shortcuts var thisWords = this.words; var thatWords = wordArray.words; var thisSigBytes = this.sigBytes; var thatSigBytes = wordArray.sigBytes; // Clamp excess bits this.clamp(); // Concat if (thisSigBytes % 4) { // Copy one byte at a time for (var i = 0; i < thatSigBytes; i++) { var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8); } } else { // Copy one word at a time for (var i = 0; i < thatSigBytes; i += 4) { thisWords[(thisSigBytes + i) >>> 2] = thatWords[i >>> 2]; } } this.sigBytes += thatSigBytes; // Chainable return this; }, /** * Removes insignificant bits. 
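 * Concretely, the implementation below zeroes any bits past sigBytes in the final word
 * and truncates the words array to Math.ceil(sigBytes / 4) entries.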
* * @example * * wordArray.clamp(); */ clamp: function () { // Shortcuts var words = this.words; var sigBytes = this.sigBytes; // Clamp words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8); words.length = Math.ceil(sigBytes / 4); }, /** * Creates a copy of this word array. * * @return {WordArray} The clone. * * @example * * var clone = wordArray.clone(); */ clone: function () { var clone = Base.clone.call(this); clone.words = this.words.slice(0); return clone; }, /** * Creates a word array filled with random bytes. * * @param {number} nBytes The number of random bytes to generate. * * @return {WordArray} The random word array. * * @static * * @example * * var wordArray = CryptoJS.lib.WordArray.random(16); */ random: function (nBytes) { var words = []; var r = (function (m_w) { var m_w = m_w; var m_z = 0x3ade68b1; var mask = 0xffffffff; return function () { m_z = (0x9069 * (m_z & 0xFFFF) + (m_z >> 0x10)) & mask; m_w = (0x4650 * (m_w & 0xFFFF) + (m_w >> 0x10)) & mask; var result = ((m_z << 0x10) + m_w) & mask; result /= 0x100000000; result += 0.5; return result * (Math.random() > .5 ? 1 : -1); } }); for (var i = 0, rcache; i < nBytes; i += 4) { var _r = r((rcache || Math.random()) * 0x100000000); rcache = _r() * 0x3ade67b7; words.push((_r() * 0x100000000) | 0); } return new WordArray.init(words, nBytes); } }); /** * Encoder namespace. */ var C_enc = C.enc = {}; /** * Hex encoding strategy. */ var Hex = C_enc.Hex = { /** * Converts a word array to a hex string. * * @param {WordArray} wordArray The word array. * * @return {string} The hex string. * * @static * * @example * * var hexString = CryptoJS.enc.Hex.stringify(wordArray); */ stringify: function (wordArray) { // Shortcuts var words = wordArray.words; var sigBytes = wordArray.sigBytes; // Convert var hexChars = []; for (var i = 0; i < sigBytes; i++) { var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; hexChars.push((bite >>> 4).toString(16)); hexChars.push((bite & 0x0f).toString(16)); } return hexChars.join(''); }, /** * Converts a hex string to a word array. * * @param {string} hexStr The hex string. * * @return {WordArray} The word array. * * @static * * @example * * var wordArray = CryptoJS.enc.Hex.parse(hexString); */ parse: function (hexStr) { // Shortcut var hexStrLength = hexStr.length; // Convert var words = []; for (var i = 0; i < hexStrLength; i += 2) { words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4); } return new WordArray.init(words, hexStrLength / 2); } }; /** * Latin1 encoding strategy. */ var Latin1 = C_enc.Latin1 = { /** * Converts a word array to a Latin1 string. * * @param {WordArray} wordArray The word array. * * @return {string} The Latin1 string. * * @static * * @example * * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray); */ stringify: function (wordArray) { // Shortcuts var words = wordArray.words; var sigBytes = wordArray.sigBytes; // Convert var latin1Chars = []; for (var i = 0; i < sigBytes; i++) { var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; latin1Chars.push(String.fromCharCode(bite)); } return latin1Chars.join(''); }, /** * Converts a Latin1 string to a word array. * * @param {string} latin1Str The Latin1 string. * * @return {WordArray} The word array. 
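 * For instance (an illustrative note, not part of the original docs),
 * CryptoJS.enc.Latin1.parse('Hi') yields a WordArray whose single word is 0x48690000
 * and whose sigBytes is 2.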
* * @static * * @example * * var wordArray = CryptoJS.enc.Latin1.parse(latin1String); */ parse: function (latin1Str) { // Shortcut var latin1StrLength = latin1Str.length; // Convert var words = []; for (var i = 0; i < latin1StrLength; i++) { words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8); } return new WordArray.init(words, latin1StrLength); } }; /** * UTF-8 encoding strategy. */ var Utf8 = C_enc.Utf8 = { /** * Converts a word array to a UTF-8 string. * * @param {WordArray} wordArray The word array. * * @return {string} The UTF-8 string. * * @static * * @example * * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray); */ stringify: function (wordArray) { try { return decodeURIComponent(escape(Latin1.stringify(wordArray))); } catch (e) { throw new Error('Malformed UTF-8 data'); } }, /** * Converts a UTF-8 string to a word array. * * @param {string} utf8Str The UTF-8 string. * * @return {WordArray} The word array. * * @static * * @example * * var wordArray = CryptoJS.enc.Utf8.parse(utf8String); */ parse: function (utf8Str) { return Latin1.parse(unescape(encodeURIComponent(utf8Str))); } }; /** * Abstract buffered block algorithm template. * * The property blockSize must be implemented in a concrete subtype. * * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0 */ var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({ /** * Resets this block algorithm's data buffer to its initial state. * * @example * * bufferedBlockAlgorithm.reset(); */ reset: function () { // Initial values this._data = new WordArray.init(); this._nDataBytes = 0; }, /** * Adds new data to this block algorithm's buffer. * * @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8. * * @example * * bufferedBlockAlgorithm._append('data'); * bufferedBlockAlgorithm._append(wordArray); */ _append: function (data) { // Convert string to WordArray, else assume WordArray already if (typeof data == 'string') { data = Utf8.parse(data); } // Append this._data.concat(data); this._nDataBytes += data.sigBytes; }, /** * Processes available data blocks. * * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype. * * @param {boolean} doFlush Whether all blocks and partial blocks should be processed. * * @return {WordArray} The processed data. 
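 *
 * A worked example of the block accounting in this method (numbers are
 * illustrative): with blockSize = 16 words (64 bytes), 100 buffered bytes
 * and the default _minBufferSize of 0, a non-flush call processes
 * floor(100 / 64) = 1 full block and leaves 36 bytes buffered, while a
 * flush call processes ceil(100 / 64) = 2 blocks including the final
 * partial block.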
* * @example * * var processedData = bufferedBlockAlgorithm._process(); * var processedData = bufferedBlockAlgorithm._process(!!'flush'); */ _process: function (doFlush) { // Shortcuts var data = this._data; var dataWords = data.words; var dataSigBytes = data.sigBytes; var blockSize = this.blockSize; var blockSizeBytes = blockSize * 4; // Count blocks ready var nBlocksReady = dataSigBytes / blockSizeBytes; if (doFlush) { // Round up to include partial blocks nBlocksReady = Math.ceil(nBlocksReady); } else { // Round down to include only full blocks, // less the number of blocks that must remain in the buffer nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0); } // Count words ready var nWordsReady = nBlocksReady * blockSize; // Count bytes ready var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes); // Process blocks if (nWordsReady) { for (var offset = 0; offset < nWordsReady; offset += blockSize) { // Perform concrete-algorithm logic this._doProcessBlock(dataWords, offset); } // Remove processed words var processedWords = dataWords.splice(0, nWordsReady); data.sigBytes -= nBytesReady; } // Return processed words return new WordArray.init(processedWords, nBytesReady); }, /** * Creates a copy of this object. * * @return {Object} The clone. * * @example * * var clone = bufferedBlockAlgorithm.clone(); */ clone: function () { var clone = Base.clone.call(this); clone._data = this._data.clone(); return clone; }, _minBufferSize: 0 }); /** * Abstract hasher template. * * @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits) */ var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({ /** * Configuration options. */ cfg: Base.extend(), /** * Initializes a newly created hasher. * * @param {Object} cfg (Optional) The configuration options to use for this hash computation. * * @example * * var hasher = CryptoJS.algo.SHA256.create(); */ init: function (cfg) { // Apply config defaults this.cfg = this.cfg.extend(cfg); // Set initial values this.reset(); }, /** * Resets this hasher to its initial state. * * @example * * hasher.reset(); */ reset: function () { // Reset data buffer BufferedBlockAlgorithm.reset.call(this); // Perform concrete-hasher logic this._doReset(); }, /** * Updates this hasher with a message. * * @param {WordArray|string} messageUpdate The message to append. * * @return {Hasher} This hasher. * * @example * * hasher.update('message'); * hasher.update(wordArray); */ update: function (messageUpdate) { // Append this._append(messageUpdate); // Update the hash this._process(); // Chainable return this; }, /** * Finalizes the hash computation. * Note that the finalize operation is effectively a destructive, read-once operation. * * @param {WordArray|string} messageUpdate (Optional) A final message update. * * @return {WordArray} The hash. * * @example * * var hash = hasher.finalize(); * var hash = hasher.finalize('message'); * var hash = hasher.finalize(wordArray); */ finalize: function (messageUpdate) { // Final message update if (messageUpdate) { this._append(messageUpdate); } // Perform concrete-hasher logic var hash = this._doFinalize(); return hash; }, blockSize: 512/32, /** * Creates a shortcut function to a hasher's object interface. * * @param {Hasher} hasher The hasher to create a helper for. * * @return {Function} The shortcut function. 
* * @static * * @example * * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256); */ _createHelper: function (hasher) { return function (message, cfg) { return new hasher.init(cfg).finalize(message); }; }, /** * Creates a shortcut function to the HMAC's object interface. * * @param {Hasher} hasher The hasher to use in this HMAC helper. * * @return {Function} The shortcut function. * * @static * * @example * * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256); */ _createHmacHelper: function (hasher) { return function (message, key) { return new C_algo.HMAC.init(hasher, key).finalize(message); }; } }); /** * Algorithm namespace. */ var C_algo = C.algo = {}; return C; }(Math)); return CryptoJS; })); },{}],47:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var C_enc = C.enc; /** * Base64 encoding strategy. */ var Base64 = C_enc.Base64 = { /** * Converts a word array to a Base64 string. * * @param {WordArray} wordArray The word array. * * @return {string} The Base64 string. * * @static * * @example * * var base64String = CryptoJS.enc.Base64.stringify(wordArray); */ stringify: function (wordArray) { // Shortcuts var words = wordArray.words; var sigBytes = wordArray.sigBytes; var map = this._map; // Clamp excess bits wordArray.clamp(); // Convert var base64Chars = []; for (var i = 0; i < sigBytes; i += 3) { var byte1 = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff; var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff; var triplet = (byte1 << 16) | (byte2 << 8) | byte3; for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) { base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f)); } } // Add padding var paddingChar = map.charAt(64); if (paddingChar) { while (base64Chars.length % 4) { base64Chars.push(paddingChar); } } return base64Chars.join(''); }, /** * Converts a Base64 string to a word array. * * @param {string} base64Str The Base64 string. * * @return {WordArray} The word array. 
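 *
 * Every four Base64 characters decode to three bytes, and any trailing '='
 * padding is skipped before decoding. An illustrative round trip (the
 * literal string is only an example):
 *
 *     var words = CryptoJS.enc.Base64.parse('SGVsbG8=');
 *     var str = CryptoJS.enc.Utf8.stringify(words); // 'Hello'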
* * @static * * @example * * var wordArray = CryptoJS.enc.Base64.parse(base64String); */ parse: function (base64Str) { // Shortcuts var base64StrLength = base64Str.length; var map = this._map; var reverseMap = this._reverseMap; if (!reverseMap) { reverseMap = this._reverseMap = []; for (var j = 0; j < map.length; j++) { reverseMap[map.charCodeAt(j)] = j; } } // Ignore padding var paddingChar = map.charAt(64); if (paddingChar) { var paddingIndex = base64Str.indexOf(paddingChar); if (paddingIndex !== -1) { base64StrLength = paddingIndex; } } // Convert return parseLoop(base64Str, base64StrLength, reverseMap); }, _map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=' }; function parseLoop(base64Str, base64StrLength, reverseMap) { var words = []; var nBytes = 0; for (var i = 0; i < base64StrLength; i++) { if (i % 4) { var bits1 = reverseMap[base64Str.charCodeAt(i - 1)] << ((i % 4) * 2); var bits2 = reverseMap[base64Str.charCodeAt(i)] >>> (6 - (i % 4) * 2); words[nBytes >>> 2] |= (bits1 | bits2) << (24 - (nBytes % 4) * 8); nBytes++; } } return WordArray.create(words, nBytes); } }()); return CryptoJS.enc.Base64; })); },{"./core":46}],48:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var C_enc = C.enc; /** * UTF-16 BE encoding strategy. */ var Utf16BE = C_enc.Utf16 = C_enc.Utf16BE = { /** * Converts a word array to a UTF-16 BE string. * * @param {WordArray} wordArray The word array. * * @return {string} The UTF-16 BE string. * * @static * * @example * * var utf16String = CryptoJS.enc.Utf16.stringify(wordArray); */ stringify: function (wordArray) { // Shortcuts var words = wordArray.words; var sigBytes = wordArray.sigBytes; // Convert var utf16Chars = []; for (var i = 0; i < sigBytes; i += 2) { var codePoint = (words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff; utf16Chars.push(String.fromCharCode(codePoint)); } return utf16Chars.join(''); }, /** * Converts a UTF-16 BE string to a word array. * * @param {string} utf16Str The UTF-16 BE string. * * @return {WordArray} The word array. * * @static * * @example * * var wordArray = CryptoJS.enc.Utf16.parse(utf16String); */ parse: function (utf16Str) { // Shortcut var utf16StrLength = utf16Str.length; // Convert var words = []; for (var i = 0; i < utf16StrLength; i++) { words[i >>> 1] |= utf16Str.charCodeAt(i) << (16 - (i % 2) * 16); } return WordArray.create(words, utf16StrLength * 2); } }; /** * UTF-16 LE encoding strategy. */ C_enc.Utf16LE = { /** * Converts a word array to a UTF-16 LE string. * * @param {WordArray} wordArray The word array. * * @return {string} The UTF-16 LE string. * * @static * * @example * * var utf16Str = CryptoJS.enc.Utf16LE.stringify(wordArray); */ stringify: function (wordArray) { // Shortcuts var words = wordArray.words; var sigBytes = wordArray.sigBytes; // Convert var utf16Chars = []; for (var i = 0; i < sigBytes; i += 2) { var codePoint = swapEndian((words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff); utf16Chars.push(String.fromCharCode(codePoint)); } return utf16Chars.join(''); }, /** * Converts a UTF-16 LE string to a word array. * * @param {string} utf16Str The UTF-16 LE string. * * @return {WordArray} The word array. 
* * @static * * @example * * var wordArray = CryptoJS.enc.Utf16LE.parse(utf16Str); */ parse: function (utf16Str) { // Shortcut var utf16StrLength = utf16Str.length; // Convert var words = []; for (var i = 0; i < utf16StrLength; i++) { words[i >>> 1] |= swapEndian(utf16Str.charCodeAt(i) << (16 - (i % 2) * 16)); } return WordArray.create(words, utf16StrLength * 2); } }; function swapEndian(word) { return ((word << 8) & 0xff00ff00) | ((word >>> 8) & 0x00ff00ff); } }()); return CryptoJS.enc.Utf16; })); },{"./core":46}],49:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./sha1"), _dereq_("./hmac")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./sha1", "./hmac"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Base = C_lib.Base; var WordArray = C_lib.WordArray; var C_algo = C.algo; var MD5 = C_algo.MD5; /** * This key derivation function is meant to conform with EVP_BytesToKey. * www.openssl.org/docs/crypto/EVP_BytesToKey.html */ var EvpKDF = C_algo.EvpKDF = Base.extend({ /** * Configuration options. * * @property {number} keySize The key size in words to generate. Default: 4 (128 bits) * @property {Hasher} hasher The hash algorithm to use. Default: MD5 * @property {number} iterations The number of iterations to perform. Default: 1 */ cfg: Base.extend({ keySize: 128/32, hasher: MD5, iterations: 1 }), /** * Initializes a newly created key derivation function. * * @param {Object} cfg (Optional) The configuration options to use for the derivation. * * @example * * var kdf = CryptoJS.algo.EvpKDF.create(); * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8 }); * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8, iterations: 1000 }); */ init: function (cfg) { this.cfg = this.cfg.extend(cfg); }, /** * Derives a key from a password. * * @param {WordArray|string} password The password. * @param {WordArray|string} salt A salt. * * @return {WordArray} The derived key. * * @example * * var key = kdf.compute(password, salt); */ compute: function (password, salt) { // Shortcut var cfg = this.cfg; // Init hasher var hasher = cfg.hasher.create(); // Initial values var derivedKey = WordArray.create(); // Shortcuts var derivedKeyWords = derivedKey.words; var keySize = cfg.keySize; var iterations = cfg.iterations; // Generate key while (derivedKeyWords.length < keySize) { if (block) { hasher.update(block); } var block = hasher.update(password).finalize(salt); hasher.reset(); // Iterations for (var i = 1; i < iterations; i++) { block = hasher.finalize(block); hasher.reset(); } derivedKey.concat(block); } derivedKey.sigBytes = keySize * 4; return derivedKey; } }); /** * Derives a key from a password. * * @param {WordArray|string} password The password. * @param {WordArray|string} salt A salt. * @param {Object} cfg (Optional) The configuration options to use for this computation. * * @return {WordArray} The derived key. 
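 *
 * As in EVP_BytesToKey, key material is produced in hasher-sized blocks:
 * each block hashes (previous block || password || salt) and the results
 * are concatenated until keySize words are available. An illustrative call
 * deriving a 256-bit key (password and salt are placeholders):
 *
 *     var key = CryptoJS.EvpKDF(password, salt, { keySize: 256 / 32 });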
* * @static * * @example * * var key = CryptoJS.EvpKDF(password, salt); * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8 }); * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8, iterations: 1000 }); */ C.EvpKDF = function (password, salt, cfg) { return EvpKDF.create(cfg).compute(password, salt); }; }()); return CryptoJS.EvpKDF; })); },{"./core":46,"./hmac":51,"./sha1":70}],50:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function (undefined) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var CipherParams = C_lib.CipherParams; var C_enc = C.enc; var Hex = C_enc.Hex; var C_format = C.format; var HexFormatter = C_format.Hex = { /** * Converts the ciphertext of a cipher params object to a hexadecimally encoded string. * * @param {CipherParams} cipherParams The cipher params object. * * @return {string} The hexadecimally encoded string. * * @static * * @example * * var hexString = CryptoJS.format.Hex.stringify(cipherParams); */ stringify: function (cipherParams) { return cipherParams.ciphertext.toString(Hex); }, /** * Converts a hexadecimally encoded ciphertext string to a cipher params object. * * @param {string} input The hexadecimally encoded string. * * @return {CipherParams} The cipher params object. * * @static * * @example * * var cipherParams = CryptoJS.format.Hex.parse(hexString); */ parse: function (input) { var ciphertext = Hex.parse(input); return CipherParams.create({ ciphertext: ciphertext }); } }; }()); return CryptoJS.format.Hex; })); },{"./cipher-core":45,"./core":46}],51:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Base = C_lib.Base; var C_enc = C.enc; var Utf8 = C_enc.Utf8; var C_algo = C.algo; /** * HMAC algorithm. */ var HMAC = C_algo.HMAC = Base.extend({ /** * Initializes a newly created HMAC. * * @param {Hasher} hasher The hash algorithm to use. * @param {WordArray|string} key The secret key. 
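 *
 * The key is hashed first if it is longer than the hasher's block size,
 * padded to one full block, and XORed with the inner (0x36) and outer
 * (0x5c) pad constants, so that finalize() below computes
 * H((K xor opad) || H((K xor ipad) || message)).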
* * @example * * var hmacHasher = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, key); */ init: function (hasher, key) { // Init hasher hasher = this._hasher = new hasher.init(); // Convert string to WordArray, else assume WordArray already if (typeof key == 'string') { key = Utf8.parse(key); } // Shortcuts var hasherBlockSize = hasher.blockSize; var hasherBlockSizeBytes = hasherBlockSize * 4; // Allow arbitrary length keys if (key.sigBytes > hasherBlockSizeBytes) { key = hasher.finalize(key); } // Clamp excess bits key.clamp(); // Clone key for inner and outer pads var oKey = this._oKey = key.clone(); var iKey = this._iKey = key.clone(); // Shortcuts var oKeyWords = oKey.words; var iKeyWords = iKey.words; // XOR keys with pad constants for (var i = 0; i < hasherBlockSize; i++) { oKeyWords[i] ^= 0x5c5c5c5c; iKeyWords[i] ^= 0x36363636; } oKey.sigBytes = iKey.sigBytes = hasherBlockSizeBytes; // Set initial values this.reset(); }, /** * Resets this HMAC to its initial state. * * @example * * hmacHasher.reset(); */ reset: function () { // Shortcut var hasher = this._hasher; // Reset hasher.reset(); hasher.update(this._iKey); }, /** * Updates this HMAC with a message. * * @param {WordArray|string} messageUpdate The message to append. * * @return {HMAC} This HMAC instance. * * @example * * hmacHasher.update('message'); * hmacHasher.update(wordArray); */ update: function (messageUpdate) { this._hasher.update(messageUpdate); // Chainable return this; }, /** * Finalizes the HMAC computation. * Note that the finalize operation is effectively a destructive, read-once operation. * * @param {WordArray|string} messageUpdate (Optional) A final message update. * * @return {WordArray} The HMAC. * * @example * * var hmac = hmacHasher.finalize(); * var hmac = hmacHasher.finalize('message'); * var hmac = hmacHasher.finalize(wordArray); */ finalize: function (messageUpdate) { // Shortcut var hasher = this._hasher; // Compute HMAC var innerHash = hasher.finalize(messageUpdate); hasher.reset(); var hmac = hasher.finalize(this._oKey.clone().concat(innerHash)); return hmac; } }); }()); })); },{"./core":46}],52:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./x64-core"), _dereq_("./lib-typedarrays"), _dereq_("./enc-utf16"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./sha1"), _dereq_("./sha256"), _dereq_("./sha224"), _dereq_("./sha512"), _dereq_("./sha384"), _dereq_("./sha3"), _dereq_("./ripemd160"), _dereq_("./hmac"), _dereq_("./pbkdf2"), _dereq_("./evpkdf"), _dereq_("./cipher-core"), _dereq_("./mode-cfb"), _dereq_("./mode-ctr"), _dereq_("./mode-ctr-gladman"), _dereq_("./mode-ofb"), _dereq_("./mode-ecb"), _dereq_("./pad-ansix923"), _dereq_("./pad-iso10126"), _dereq_("./pad-iso97971"), _dereq_("./pad-zeropadding"), _dereq_("./pad-nopadding"), _dereq_("./format-hex"), _dereq_("./aes"), _dereq_("./tripledes"), _dereq_("./rc4"), _dereq_("./rabbit"), _dereq_("./rabbit-legacy")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./x64-core", "./lib-typedarrays", "./enc-utf16", "./enc-base64", "./md5", "./sha1", "./sha256", "./sha224", "./sha512", "./sha384", "./sha3", "./ripemd160", "./hmac", "./pbkdf2", "./evpkdf", "./cipher-core", "./mode-cfb", "./mode-ctr", "./mode-ctr-gladman", "./mode-ofb", "./mode-ecb", "./pad-ansix923", "./pad-iso10126", "./pad-iso97971", "./pad-zeropadding", "./pad-nopadding", "./format-hex", "./aes", "./tripledes", 
"./rc4", "./rabbit", "./rabbit-legacy"], factory); } else { // Global (browser) root.CryptoJS = factory(root.CryptoJS); } }(this, function (CryptoJS) { return CryptoJS; })); },{"./aes":44,"./cipher-core":45,"./core":46,"./enc-base64":47,"./enc-utf16":48,"./evpkdf":49,"./format-hex":50,"./hmac":51,"./lib-typedarrays":53,"./md5":54,"./mode-cfb":55,"./mode-ctr":57,"./mode-ctr-gladman":56,"./mode-ecb":58,"./mode-ofb":59,"./pad-ansix923":60,"./pad-iso10126":61,"./pad-iso97971":62,"./pad-nopadding":63,"./pad-zeropadding":64,"./pbkdf2":65,"./rabbit":67,"./rabbit-legacy":66,"./rc4":68,"./ripemd160":69,"./sha1":70,"./sha224":71,"./sha256":72,"./sha3":73,"./sha384":74,"./sha512":75,"./tripledes":76,"./x64-core":77}],53:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Check if typed arrays are supported if (typeof ArrayBuffer != 'function') { return; } // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; // Reference original init var superInit = WordArray.init; // Augment WordArray.init to handle typed arrays var subInit = WordArray.init = function (typedArray) { // Convert buffers to uint8 if (typedArray instanceof ArrayBuffer) { typedArray = new Uint8Array(typedArray); } // Convert other array views to uint8 if ( typedArray instanceof Int8Array || (typeof Uint8ClampedArray !== "undefined" && typedArray instanceof Uint8ClampedArray) || typedArray instanceof Int16Array || typedArray instanceof Uint16Array || typedArray instanceof Int32Array || typedArray instanceof Uint32Array || typedArray instanceof Float32Array || typedArray instanceof Float64Array ) { typedArray = new Uint8Array(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength); } // Handle Uint8Array if (typedArray instanceof Uint8Array) { // Shortcut var typedArrayByteLength = typedArray.byteLength; // Extract bytes var words = []; for (var i = 0; i < typedArrayByteLength; i++) { words[i >>> 2] |= typedArray[i] << (24 - (i % 4) * 8); } // Initialize this word array superInit.call(this, words, typedArrayByteLength); } else { // Else call normal init superInit.apply(this, arguments); } }; subInit.prototype = WordArray; }()); return CryptoJS.lib.WordArray; })); },{"./core":46}],54:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function (Math) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var Hasher = C_lib.Hasher; var C_algo = C.algo; // Constants table var T = []; // Compute constants (function () { for (var i = 0; i < 64; i++) { T[i] = (Math.abs(Math.sin(i + 1)) * 0x100000000) | 0; } }()); /** * MD5 hash algorithm. 
*/ var MD5 = C_algo.MD5 = Hasher.extend({ _doReset: function () { this._hash = new WordArray.init([ 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476 ]); }, _doProcessBlock: function (M, offset) { // Swap endian for (var i = 0; i < 16; i++) { // Shortcuts var offset_i = offset + i; var M_offset_i = M[offset_i]; M[offset_i] = ( (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) ); } // Shortcuts var H = this._hash.words; var M_offset_0 = M[offset + 0]; var M_offset_1 = M[offset + 1]; var M_offset_2 = M[offset + 2]; var M_offset_3 = M[offset + 3]; var M_offset_4 = M[offset + 4]; var M_offset_5 = M[offset + 5]; var M_offset_6 = M[offset + 6]; var M_offset_7 = M[offset + 7]; var M_offset_8 = M[offset + 8]; var M_offset_9 = M[offset + 9]; var M_offset_10 = M[offset + 10]; var M_offset_11 = M[offset + 11]; var M_offset_12 = M[offset + 12]; var M_offset_13 = M[offset + 13]; var M_offset_14 = M[offset + 14]; var M_offset_15 = M[offset + 15]; // Working varialbes var a = H[0]; var b = H[1]; var c = H[2]; var d = H[3]; // Computation a = FF(a, b, c, d, M_offset_0, 7, T[0]); d = FF(d, a, b, c, M_offset_1, 12, T[1]); c = FF(c, d, a, b, M_offset_2, 17, T[2]); b = FF(b, c, d, a, M_offset_3, 22, T[3]); a = FF(a, b, c, d, M_offset_4, 7, T[4]); d = FF(d, a, b, c, M_offset_5, 12, T[5]); c = FF(c, d, a, b, M_offset_6, 17, T[6]); b = FF(b, c, d, a, M_offset_7, 22, T[7]); a = FF(a, b, c, d, M_offset_8, 7, T[8]); d = FF(d, a, b, c, M_offset_9, 12, T[9]); c = FF(c, d, a, b, M_offset_10, 17, T[10]); b = FF(b, c, d, a, M_offset_11, 22, T[11]); a = FF(a, b, c, d, M_offset_12, 7, T[12]); d = FF(d, a, b, c, M_offset_13, 12, T[13]); c = FF(c, d, a, b, M_offset_14, 17, T[14]); b = FF(b, c, d, a, M_offset_15, 22, T[15]); a = GG(a, b, c, d, M_offset_1, 5, T[16]); d = GG(d, a, b, c, M_offset_6, 9, T[17]); c = GG(c, d, a, b, M_offset_11, 14, T[18]); b = GG(b, c, d, a, M_offset_0, 20, T[19]); a = GG(a, b, c, d, M_offset_5, 5, T[20]); d = GG(d, a, b, c, M_offset_10, 9, T[21]); c = GG(c, d, a, b, M_offset_15, 14, T[22]); b = GG(b, c, d, a, M_offset_4, 20, T[23]); a = GG(a, b, c, d, M_offset_9, 5, T[24]); d = GG(d, a, b, c, M_offset_14, 9, T[25]); c = GG(c, d, a, b, M_offset_3, 14, T[26]); b = GG(b, c, d, a, M_offset_8, 20, T[27]); a = GG(a, b, c, d, M_offset_13, 5, T[28]); d = GG(d, a, b, c, M_offset_2, 9, T[29]); c = GG(c, d, a, b, M_offset_7, 14, T[30]); b = GG(b, c, d, a, M_offset_12, 20, T[31]); a = HH(a, b, c, d, M_offset_5, 4, T[32]); d = HH(d, a, b, c, M_offset_8, 11, T[33]); c = HH(c, d, a, b, M_offset_11, 16, T[34]); b = HH(b, c, d, a, M_offset_14, 23, T[35]); a = HH(a, b, c, d, M_offset_1, 4, T[36]); d = HH(d, a, b, c, M_offset_4, 11, T[37]); c = HH(c, d, a, b, M_offset_7, 16, T[38]); b = HH(b, c, d, a, M_offset_10, 23, T[39]); a = HH(a, b, c, d, M_offset_13, 4, T[40]); d = HH(d, a, b, c, M_offset_0, 11, T[41]); c = HH(c, d, a, b, M_offset_3, 16, T[42]); b = HH(b, c, d, a, M_offset_6, 23, T[43]); a = HH(a, b, c, d, M_offset_9, 4, T[44]); d = HH(d, a, b, c, M_offset_12, 11, T[45]); c = HH(c, d, a, b, M_offset_15, 16, T[46]); b = HH(b, c, d, a, M_offset_2, 23, T[47]); a = II(a, b, c, d, M_offset_0, 6, T[48]); d = II(d, a, b, c, M_offset_7, 10, T[49]); c = II(c, d, a, b, M_offset_14, 15, T[50]); b = II(b, c, d, a, M_offset_5, 21, T[51]); a = II(a, b, c, d, M_offset_12, 6, T[52]); d = II(d, a, b, c, M_offset_3, 10, T[53]); c = II(c, d, a, b, M_offset_10, 15, T[54]); b = II(b, c, d, a, M_offset_1, 21, T[55]); a = II(a, b, c, d, M_offset_8, 6, T[56]); d = II(d, 
a, b, c, M_offset_15, 10, T[57]); c = II(c, d, a, b, M_offset_6, 15, T[58]); b = II(b, c, d, a, M_offset_13, 21, T[59]); a = II(a, b, c, d, M_offset_4, 6, T[60]); d = II(d, a, b, c, M_offset_11, 10, T[61]); c = II(c, d, a, b, M_offset_2, 15, T[62]); b = II(b, c, d, a, M_offset_9, 21, T[63]); // Intermediate hash value H[0] = (H[0] + a) | 0; H[1] = (H[1] + b) | 0; H[2] = (H[2] + c) | 0; H[3] = (H[3] + d) | 0; }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; // Add padding dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); var nBitsTotalH = Math.floor(nBitsTotal / 0x100000000); var nBitsTotalL = nBitsTotal; dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = ( (((nBitsTotalH << 8) | (nBitsTotalH >>> 24)) & 0x00ff00ff) | (((nBitsTotalH << 24) | (nBitsTotalH >>> 8)) & 0xff00ff00) ); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( (((nBitsTotalL << 8) | (nBitsTotalL >>> 24)) & 0x00ff00ff) | (((nBitsTotalL << 24) | (nBitsTotalL >>> 8)) & 0xff00ff00) ); data.sigBytes = (dataWords.length + 1) * 4; // Hash final blocks this._process(); // Shortcuts var hash = this._hash; var H = hash.words; // Swap endian for (var i = 0; i < 4; i++) { // Shortcut var H_i = H[i]; H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); } // Return final computed hash return hash; }, clone: function () { var clone = Hasher.clone.call(this); clone._hash = this._hash.clone(); return clone; } }); function FF(a, b, c, d, x, s, t) { var n = a + ((b & c) | (~b & d)) + x + t; return ((n << s) | (n >>> (32 - s))) + b; } function GG(a, b, c, d, x, s, t) { var n = a + ((b & d) | (c & ~d)) + x + t; return ((n << s) | (n >>> (32 - s))) + b; } function HH(a, b, c, d, x, s, t) { var n = a + (b ^ c ^ d) + x + t; return ((n << s) | (n >>> (32 - s))) + b; } function II(a, b, c, d, x, s, t) { var n = a + (c ^ (b | ~d)) + x + t; return ((n << s) | (n >>> (32 - s))) + b; } /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.MD5('message'); * var hash = CryptoJS.MD5(wordArray); */ C.MD5 = Hasher._createHelper(MD5); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. * * @static * * @example * * var hmac = CryptoJS.HmacMD5(message, key); */ C.HmacMD5 = Hasher._createHmacHelper(MD5); }(Math)); return CryptoJS.MD5; })); },{"./core":46}],55:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Cipher Feedback block mode. 
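 *
 * The previous ciphertext block (or the IV for the first block) is run
 * through the block cipher to produce a keystream that is XORed with the
 * plaintext, which is why the decryptor below also calls encryptBlock.
 * Illustrative usage with the AES helper bundled elsewhere in this file
 * (message and passphrase are placeholders):
 *
 *     var encrypted = CryptoJS.AES.encrypt('message', 'passphrase', {
 *         mode: CryptoJS.mode.CFB,
 *         padding: CryptoJS.pad.Pkcs7
 *     });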
*/ CryptoJS.mode.CFB = (function () { var CFB = CryptoJS.lib.BlockCipherMode.extend(); CFB.Encryptor = CFB.extend({ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher; var blockSize = cipher.blockSize; generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); // Remember this block to use with next block this._prevBlock = words.slice(offset, offset + blockSize); } }); CFB.Decryptor = CFB.extend({ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher; var blockSize = cipher.blockSize; // Remember this block to use with next block var thisBlock = words.slice(offset, offset + blockSize); generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); // This block becomes the previous block this._prevBlock = thisBlock; } }); function generateKeystreamAndEncrypt(words, offset, blockSize, cipher) { // Shortcut var iv = this._iv; // Generate keystream if (iv) { var keystream = iv.slice(0); // Remove IV for subsequent blocks this._iv = undefined; } else { var keystream = this._prevBlock; } cipher.encryptBlock(keystream, 0); // Encrypt for (var i = 0; i < blockSize; i++) { words[offset + i] ^= keystream[i]; } } return CFB; }()); return CryptoJS.mode.CFB; })); },{"./cipher-core":45,"./core":46}],56:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** @preserve * Counter block mode compatible with Dr Brian Gladman fileenc.c * derived from CryptoJS.mode.CTR * Jan Hruby jhruby.web@gmail.com */ CryptoJS.mode.CTRGladman = (function () { var CTRGladman = CryptoJS.lib.BlockCipherMode.extend(); function incWord(word) { if (((word >> 24) & 0xff) === 0xff) { //overflow var b1 = (word >> 16)&0xff; var b2 = (word >> 8)&0xff; var b3 = word & 0xff; if (b1 === 0xff) // overflow b1 { b1 = 0; if (b2 === 0xff) { b2 = 0; if (b3 === 0xff) { b3 = 0; } else { ++b3; } } else { ++b2; } } else { ++b1; } word = 0; word += (b1 << 16); word += (b2 << 8); word += b3; } else { word += (0x01 << 24); } return word; } function incCounter(counter) { if ((counter[0] = incWord(counter[0])) === 0) { // encr_data in fileenc.c from Dr Brian Gladman's counts only with DWORD j < 8 counter[1] = incWord(counter[1]); } return counter; } var Encryptor = CTRGladman.Encryptor = CTRGladman.extend({ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher var blockSize = cipher.blockSize; var iv = this._iv; var counter = this._counter; // Generate keystream if (iv) { counter = this._counter = iv.slice(0); // Remove IV for subsequent blocks this._iv = undefined; } incCounter(counter); var keystream = counter.slice(0); cipher.encryptBlock(keystream, 0); // Encrypt for (var i = 0; i < blockSize; i++) { words[offset + i] ^= keystream[i]; } } }); CTRGladman.Decryptor = Encryptor; return CTRGladman; }()); return CryptoJS.mode.CTRGladman; })); },{"./cipher-core":45,"./core":46}],57:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) 
factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Counter block mode. */ CryptoJS.mode.CTR = (function () { var CTR = CryptoJS.lib.BlockCipherMode.extend(); var Encryptor = CTR.Encryptor = CTR.extend({ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher var blockSize = cipher.blockSize; var iv = this._iv; var counter = this._counter; // Generate keystream if (iv) { counter = this._counter = iv.slice(0); // Remove IV for subsequent blocks this._iv = undefined; } var keystream = counter.slice(0); cipher.encryptBlock(keystream, 0); // Increment counter counter[blockSize - 1] = (counter[blockSize - 1] + 1) | 0 // Encrypt for (var i = 0; i < blockSize; i++) { words[offset + i] ^= keystream[i]; } } }); CTR.Decryptor = Encryptor; return CTR; }()); return CryptoJS.mode.CTR; })); },{"./cipher-core":45,"./core":46}],58:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Electronic Codebook block mode. */ CryptoJS.mode.ECB = (function () { var ECB = CryptoJS.lib.BlockCipherMode.extend(); ECB.Encryptor = ECB.extend({ processBlock: function (words, offset) { this._cipher.encryptBlock(words, offset); } }); ECB.Decryptor = ECB.extend({ processBlock: function (words, offset) { this._cipher.decryptBlock(words, offset); } }); return ECB; }()); return CryptoJS.mode.ECB; })); },{"./cipher-core":45,"./core":46}],59:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Output Feedback block mode. */ CryptoJS.mode.OFB = (function () { var OFB = CryptoJS.lib.BlockCipherMode.extend(); var Encryptor = OFB.Encryptor = OFB.extend({ processBlock: function (words, offset) { // Shortcuts var cipher = this._cipher var blockSize = cipher.blockSize; var iv = this._iv; var keystream = this._keystream; // Generate keystream if (iv) { keystream = this._keystream = iv.slice(0); // Remove IV for subsequent blocks this._iv = undefined; } cipher.encryptBlock(keystream, 0); // Encrypt for (var i = 0; i < blockSize; i++) { words[offset + i] ^= keystream[i]; } } }); OFB.Decryptor = Encryptor; return OFB; }()); return CryptoJS.mode.OFB; })); },{"./cipher-core":45,"./core":46}],60:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * ANSI X.923 padding strategy. 
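 *
 * The block is filled with zero bytes and the very last byte records how
 * many padding bytes were added; for example, 11 bytes of data in a
 * 16-byte block are followed by the five bytes 00 00 00 00 05.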
*/ CryptoJS.pad.AnsiX923 = { pad: function (data, blockSize) { // Shortcuts var dataSigBytes = data.sigBytes; var blockSizeBytes = blockSize * 4; // Count padding bytes var nPaddingBytes = blockSizeBytes - dataSigBytes % blockSizeBytes; // Compute last byte position var lastBytePos = dataSigBytes + nPaddingBytes - 1; // Pad data.clamp(); data.words[lastBytePos >>> 2] |= nPaddingBytes << (24 - (lastBytePos % 4) * 8); data.sigBytes += nPaddingBytes; }, unpad: function (data) { // Get number of padding bytes from last byte var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; // Remove padding data.sigBytes -= nPaddingBytes; } }; return CryptoJS.pad.Ansix923; })); },{"./cipher-core":45,"./core":46}],61:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * ISO 10126 padding strategy. */ CryptoJS.pad.Iso10126 = { pad: function (data, blockSize) { // Shortcut var blockSizeBytes = blockSize * 4; // Count padding bytes var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes; // Pad data.concat(CryptoJS.lib.WordArray.random(nPaddingBytes - 1)). concat(CryptoJS.lib.WordArray.create([nPaddingBytes << 24], 1)); }, unpad: function (data) { // Get number of padding bytes from last byte var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; // Remove padding data.sigBytes -= nPaddingBytes; } }; return CryptoJS.pad.Iso10126; })); },{"./cipher-core":45,"./core":46}],62:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * ISO/IEC 9797-1 Padding Method 2. */ CryptoJS.pad.Iso97971 = { pad: function (data, blockSize) { // Add 0x80 byte data.concat(CryptoJS.lib.WordArray.create([0x80000000], 1)); // Zero pad the rest CryptoJS.pad.ZeroPadding.pad(data, blockSize); }, unpad: function (data) { // Remove zero padding CryptoJS.pad.ZeroPadding.unpad(data); // Remove one more byte -- the 0x80 byte data.sigBytes--; } }; return CryptoJS.pad.Iso97971; })); },{"./cipher-core":45,"./core":46}],63:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * A noop padding strategy. 
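 *
 * Both pad() and unpad() below intentionally do nothing, so the caller is
 * responsible for supplying input whose length is already a multiple of
 * the cipher's block size. An illustrative configuration (the identifiers
 * are placeholders):
 *
 *     var encrypted = CryptoJS.AES.encrypt(blockAlignedWordArray, key, {
 *         iv: iv,
 *         padding: CryptoJS.pad.NoPadding
 *     });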
*/ CryptoJS.pad.NoPadding = { pad: function () { }, unpad: function () { } }; return CryptoJS.pad.NoPadding; })); },{"./cipher-core":45,"./core":46}],64:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** * Zero padding strategy. */ CryptoJS.pad.ZeroPadding = { pad: function (data, blockSize) { // Shortcut var blockSizeBytes = blockSize * 4; // Pad data.clamp(); data.sigBytes += blockSizeBytes - ((data.sigBytes % blockSizeBytes) || blockSizeBytes); }, unpad: function (data) { // Shortcut var dataWords = data.words; // Unpad var i = data.sigBytes - 1; while (!((dataWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff)) { i--; } data.sigBytes = i + 1; } }; return CryptoJS.pad.ZeroPadding; })); },{"./cipher-core":45,"./core":46}],65:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./sha1"), _dereq_("./hmac")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./sha1", "./hmac"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Base = C_lib.Base; var WordArray = C_lib.WordArray; var C_algo = C.algo; var SHA1 = C_algo.SHA1; var HMAC = C_algo.HMAC; /** * Password-Based Key Derivation Function 2 algorithm. */ var PBKDF2 = C_algo.PBKDF2 = Base.extend({ /** * Configuration options. * * @property {number} keySize The key size in words to generate. Default: 4 (128 bits) * @property {Hasher} hasher The hasher to use. Default: SHA1 * @property {number} iterations The number of iterations to perform. Default: 1 */ cfg: Base.extend({ keySize: 128/32, hasher: SHA1, iterations: 1 }), /** * Initializes a newly created key derivation function. * * @param {Object} cfg (Optional) The configuration options to use for the derivation. * * @example * * var kdf = CryptoJS.algo.PBKDF2.create(); * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8 }); * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8, iterations: 1000 }); */ init: function (cfg) { this.cfg = this.cfg.extend(cfg); }, /** * Computes the Password-Based Key Derivation Function 2. * * @param {WordArray|string} password The password. * @param {WordArray|string} salt A salt. * * @return {WordArray} The derived key. 
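 *
 * Each key block is derived as T_i = U_1 xor U_2 xor ... xor U_c, where
 * U_1 = HMAC(password, salt || blockIndex) and every later U_j is the HMAC
 * of the previous U, exactly as in the loop below; blocks are concatenated
 * until keySize words have been produced.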
* * @example * * var key = kdf.compute(password, salt); */ compute: function (password, salt) { // Shortcut var cfg = this.cfg; // Init HMAC var hmac = HMAC.create(cfg.hasher, password); // Initial values var derivedKey = WordArray.create(); var blockIndex = WordArray.create([0x00000001]); // Shortcuts var derivedKeyWords = derivedKey.words; var blockIndexWords = blockIndex.words; var keySize = cfg.keySize; var iterations = cfg.iterations; // Generate key while (derivedKeyWords.length < keySize) { var block = hmac.update(salt).finalize(blockIndex); hmac.reset(); // Shortcuts var blockWords = block.words; var blockWordsLength = blockWords.length; // Iterations var intermediate = block; for (var i = 1; i < iterations; i++) { intermediate = hmac.finalize(intermediate); hmac.reset(); // Shortcut var intermediateWords = intermediate.words; // XOR intermediate with block for (var j = 0; j < blockWordsLength; j++) { blockWords[j] ^= intermediateWords[j]; } } derivedKey.concat(block); blockIndexWords[0]++; } derivedKey.sigBytes = keySize * 4; return derivedKey; } }); /** * Computes the Password-Based Key Derivation Function 2. * * @param {WordArray|string} password The password. * @param {WordArray|string} salt A salt. * @param {Object} cfg (Optional) The configuration options to use for this computation. * * @return {WordArray} The derived key. * * @static * * @example * * var key = CryptoJS.PBKDF2(password, salt); * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8 }); * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8, iterations: 1000 }); */ C.PBKDF2 = function (password, salt, cfg) { return PBKDF2.create(cfg).compute(password, salt); }; }()); return CryptoJS.PBKDF2; })); },{"./core":46,"./hmac":51,"./sha1":70}],66:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./evpkdf"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var StreamCipher = C_lib.StreamCipher; var C_algo = C.algo; // Reusable objects var S = []; var C_ = []; var G = []; /** * Rabbit stream cipher algorithm. * * This is a legacy version that neglected to convert the key to little-endian. * This error doesn't affect the cipher's security, * but it does affect its compatibility with other implementations. 
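 *
 * Illustrative usage with an explicit 128-bit key and 64-bit IV (the hex
 * values below are placeholders, not test vectors):
 *
 *     var key = CryptoJS.enc.Hex.parse('000102030405060708090a0b0c0d0e0f');
 *     var iv  = CryptoJS.enc.Hex.parse('0001020304050607');
 *     var ct  = CryptoJS.RabbitLegacy.encrypt('message', key, { iv: iv });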
*/ var RabbitLegacy = C_algo.RabbitLegacy = StreamCipher.extend({ _doReset: function () { // Shortcuts var K = this._key.words; var iv = this.cfg.iv; // Generate initial state values var X = this._X = [ K[0], (K[3] << 16) | (K[2] >>> 16), K[1], (K[0] << 16) | (K[3] >>> 16), K[2], (K[1] << 16) | (K[0] >>> 16), K[3], (K[2] << 16) | (K[1] >>> 16) ]; // Generate initial counter values var C = this._C = [ (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) ]; // Carry bit this._b = 0; // Iterate the system four times for (var i = 0; i < 4; i++) { nextState.call(this); } // Modify the counters for (var i = 0; i < 8; i++) { C[i] ^= X[(i + 4) & 7]; } // IV setup if (iv) { // Shortcuts var IV = iv.words; var IV_0 = IV[0]; var IV_1 = IV[1]; // Generate four subvectors var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); var i1 = (i0 >>> 16) | (i2 & 0xffff0000); var i3 = (i2 << 16) | (i0 & 0x0000ffff); // Modify counter values C[0] ^= i0; C[1] ^= i1; C[2] ^= i2; C[3] ^= i3; C[4] ^= i0; C[5] ^= i1; C[6] ^= i2; C[7] ^= i3; // Iterate the system four times for (var i = 0; i < 4; i++) { nextState.call(this); } } }, _doProcessBlock: function (M, offset) { // Shortcut var X = this._X; // Iterate the system nextState.call(this); // Generate four keystream words S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); for (var i = 0; i < 4; i++) { // Swap endian S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); // Encrypt M[offset + i] ^= S[i]; } }, blockSize: 128/32, ivSize: 64/32 }); function nextState() { // Shortcuts var X = this._X; var C = this._C; // Save old counter values for (var i = 0; i < 8; i++) { C_[i] = C[i]; } // Calculate new counter values C[0] = (C[0] + 0x4d34d34d + this._b) | 0; C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 1 : 0)) | 0; C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 
1 : 0; // Calculate the g-values for (var i = 0; i < 8; i++) { var gx = X[i] + C[i]; // Construct high and low argument for squaring var ga = gx & 0xffff; var gb = gx >>> 16; // Calculate high and low result of squaring var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb; var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0); // High XOR low G[i] = gh ^ gl; } // Calculate new state values X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0; X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0; X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0; X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0; X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0; X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0; X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0; X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0; } /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.RabbitLegacy.encrypt(message, key, cfg); * var plaintext = CryptoJS.RabbitLegacy.decrypt(ciphertext, key, cfg); */ C.RabbitLegacy = StreamCipher._createHelper(RabbitLegacy); }()); return CryptoJS.RabbitLegacy; })); },{"./cipher-core":45,"./core":46,"./enc-base64":47,"./evpkdf":49,"./md5":54}],67:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./evpkdf"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var StreamCipher = C_lib.StreamCipher; var C_algo = C.algo; // Reusable objects var S = []; var C_ = []; var G = []; /** * Rabbit stream cipher algorithm */ var Rabbit = C_algo.Rabbit = StreamCipher.extend({ _doReset: function () { // Shortcuts var K = this._key.words; var iv = this.cfg.iv; // Swap endian for (var i = 0; i < 4; i++) { K[i] = (((K[i] << 8) | (K[i] >>> 24)) & 0x00ff00ff) | (((K[i] << 24) | (K[i] >>> 8)) & 0xff00ff00); } // Generate initial state values var X = this._X = [ K[0], (K[3] << 16) | (K[2] >>> 16), K[1], (K[0] << 16) | (K[3] >>> 16), K[2], (K[1] << 16) | (K[0] >>> 16), K[3], (K[2] << 16) | (K[1] >>> 16) ]; // Generate initial counter values var C = this._C = [ (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) ]; // Carry bit this._b = 0; // Iterate the system four times for (var i = 0; i < 4; i++) { nextState.call(this); } // Modify the counters for (var i = 0; i < 8; i++) { C[i] ^= X[(i + 4) & 7]; } // IV setup if (iv) { // Shortcuts var IV = iv.words; var IV_0 = IV[0]; var IV_1 = IV[1]; // Generate four subvectors var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); var i1 = (i0 >>> 16) | (i2 & 0xffff0000); var i3 = (i2 << 16) | (i0 & 0x0000ffff); // Modify counter 
values C[0] ^= i0; C[1] ^= i1; C[2] ^= i2; C[3] ^= i3; C[4] ^= i0; C[5] ^= i1; C[6] ^= i2; C[7] ^= i3; // Iterate the system four times for (var i = 0; i < 4; i++) { nextState.call(this); } } }, _doProcessBlock: function (M, offset) { // Shortcut var X = this._X; // Iterate the system nextState.call(this); // Generate four keystream words S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); for (var i = 0; i < 4; i++) { // Swap endian S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); // Encrypt M[offset + i] ^= S[i]; } }, blockSize: 128/32, ivSize: 64/32 }); function nextState() { // Shortcuts var X = this._X; var C = this._C; // Save old counter values for (var i = 0; i < 8; i++) { C_[i] = C[i]; } // Calculate new counter values C[0] = (C[0] + 0x4d34d34d + this._b) | 0; C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 1 : 0)) | 0; C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 1 : 0; // Calculate the g-values for (var i = 0; i < 8; i++) { var gx = X[i] + C[i]; // Construct high and low argument for squaring var ga = gx & 0xffff; var gb = gx >>> 16; // Calculate high and low result of squaring var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb; var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0); // High XOR low G[i] = gh ^ gl; } // Calculate new state values X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0; X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0; X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0; X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0; X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0; X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0; X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0; X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0; } /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.Rabbit.encrypt(message, key, cfg); * var plaintext = CryptoJS.Rabbit.decrypt(ciphertext, key, cfg); */ C.Rabbit = StreamCipher._createHelper(Rabbit); }()); return CryptoJS.Rabbit; })); },{"./cipher-core":45,"./core":46,"./enc-base64":47,"./evpkdf":49,"./md5":54}],68:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./evpkdf"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var StreamCipher = C_lib.StreamCipher; var C_algo = C.algo; /** * RC4 stream cipher algorithm. 
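 *
 * The key schedule below initialises a 256-entry permutation from the key
 * bytes, and each keystream word is produced by four swap-and-lookup steps
 * over that permutation. The RC4Drop variant defined afterwards discards
 * an initial, statistically biased portion of the keystream; an
 * illustrative call dropping 192 keystream words (768 bytes):
 *
 *     var ct = CryptoJS.RC4Drop.encrypt('message', 'passphrase', { drop: 192 });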
*/ var RC4 = C_algo.RC4 = StreamCipher.extend({ _doReset: function () { // Shortcuts var key = this._key; var keyWords = key.words; var keySigBytes = key.sigBytes; // Init sbox var S = this._S = []; for (var i = 0; i < 256; i++) { S[i] = i; } // Key setup for (var i = 0, j = 0; i < 256; i++) { var keyByteIndex = i % keySigBytes; var keyByte = (keyWords[keyByteIndex >>> 2] >>> (24 - (keyByteIndex % 4) * 8)) & 0xff; j = (j + S[i] + keyByte) % 256; // Swap var t = S[i]; S[i] = S[j]; S[j] = t; } // Counters this._i = this._j = 0; }, _doProcessBlock: function (M, offset) { M[offset] ^= generateKeystreamWord.call(this); }, keySize: 256/32, ivSize: 0 }); function generateKeystreamWord() { // Shortcuts var S = this._S; var i = this._i; var j = this._j; // Generate keystream word var keystreamWord = 0; for (var n = 0; n < 4; n++) { i = (i + 1) % 256; j = (j + S[i]) % 256; // Swap var t = S[i]; S[i] = S[j]; S[j] = t; keystreamWord |= S[(S[i] + S[j]) % 256] << (24 - n * 8); } // Update counters this._i = i; this._j = j; return keystreamWord; } /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.RC4.encrypt(message, key, cfg); * var plaintext = CryptoJS.RC4.decrypt(ciphertext, key, cfg); */ C.RC4 = StreamCipher._createHelper(RC4); /** * Modified RC4 stream cipher algorithm. */ var RC4Drop = C_algo.RC4Drop = RC4.extend({ /** * Configuration options. * * @property {number} drop The number of keystream words to drop. Default 192 */ cfg: RC4.cfg.extend({ drop: 192 }), _doReset: function () { RC4._doReset.call(this); // Drop for (var i = this.cfg.drop; i > 0; i--) { generateKeystreamWord.call(this); } } }); /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.RC4Drop.encrypt(message, key, cfg); * var plaintext = CryptoJS.RC4Drop.decrypt(ciphertext, key, cfg); */ C.RC4Drop = StreamCipher._createHelper(RC4Drop); }()); return CryptoJS.RC4; })); },{"./cipher-core":45,"./core":46,"./enc-base64":47,"./evpkdf":49,"./md5":54}],69:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { /** @preserve (c) 2012 by Cédric Mesnil. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ (function (Math) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var Hasher = C_lib.Hasher; var C_algo = C.algo; // Constants table var _zl = WordArray.create([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13]); var _zr = WordArray.create([ 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11]); var _sl = WordArray.create([ 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6 ]); var _sr = WordArray.create([ 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 ]); var _hl = WordArray.create([ 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E]); var _hr = WordArray.create([ 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000]); /** * RIPEMD160 hash algorithm. 
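 *
 * A minimal usage sketch (assuming the CryptoJS.RIPEMD160 helper created
 * below); stringifying the returned WordArray yields lowercase hex:
 *
 *     var digestHex = CryptoJS.RIPEMD160('message').toString();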
*/ var RIPEMD160 = C_algo.RIPEMD160 = Hasher.extend({ _doReset: function () { this._hash = WordArray.create([0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]); }, _doProcessBlock: function (M, offset) { // Swap endian for (var i = 0; i < 16; i++) { // Shortcuts var offset_i = offset + i; var M_offset_i = M[offset_i]; // Swap M[offset_i] = ( (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) ); } // Shortcut var H = this._hash.words; var hl = _hl.words; var hr = _hr.words; var zl = _zl.words; var zr = _zr.words; var sl = _sl.words; var sr = _sr.words; // Working variables var al, bl, cl, dl, el; var ar, br, cr, dr, er; ar = al = H[0]; br = bl = H[1]; cr = cl = H[2]; dr = dl = H[3]; er = el = H[4]; // Computation var t; for (var i = 0; i < 80; i += 1) { t = (al + M[offset+zl[i]])|0; if (i<16){ t += f1(bl,cl,dl) + hl[0]; } else if (i<32) { t += f2(bl,cl,dl) + hl[1]; } else if (i<48) { t += f3(bl,cl,dl) + hl[2]; } else if (i<64) { t += f4(bl,cl,dl) + hl[3]; } else {// if (i<80) { t += f5(bl,cl,dl) + hl[4]; } t = t|0; t = rotl(t,sl[i]); t = (t+el)|0; al = el; el = dl; dl = rotl(cl, 10); cl = bl; bl = t; t = (ar + M[offset+zr[i]])|0; if (i<16){ t += f5(br,cr,dr) + hr[0]; } else if (i<32) { t += f4(br,cr,dr) + hr[1]; } else if (i<48) { t += f3(br,cr,dr) + hr[2]; } else if (i<64) { t += f2(br,cr,dr) + hr[3]; } else {// if (i<80) { t += f1(br,cr,dr) + hr[4]; } t = t|0; t = rotl(t,sr[i]) ; t = (t+er)|0; ar = er; er = dr; dr = rotl(cr, 10); cr = br; br = t; } // Intermediate hash value t = (H[1] + cl + dr)|0; H[1] = (H[2] + dl + er)|0; H[2] = (H[3] + el + ar)|0; H[3] = (H[4] + al + br)|0; H[4] = (H[0] + bl + cr)|0; H[0] = t; }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; // Add padding dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( (((nBitsTotal << 8) | (nBitsTotal >>> 24)) & 0x00ff00ff) | (((nBitsTotal << 24) | (nBitsTotal >>> 8)) & 0xff00ff00) ); data.sigBytes = (dataWords.length + 1) * 4; // Hash final blocks this._process(); // Shortcuts var hash = this._hash; var H = hash.words; // Swap endian for (var i = 0; i < 5; i++) { // Shortcut var H_i = H[i]; // Swap H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); } // Return final computed hash return hash; }, clone: function () { var clone = Hasher.clone.call(this); clone._hash = this._hash.clone(); return clone; } }); function f1(x, y, z) { return ((x) ^ (y) ^ (z)); } function f2(x, y, z) { return (((x)&(y)) | ((~x)&(z))); } function f3(x, y, z) { return (((x) | (~(y))) ^ (z)); } function f4(x, y, z) { return (((x) & (z)) | ((y)&(~(z)))); } function f5(x, y, z) { return ((x) ^ ((y) |(~(z)))); } function rotl(x,n) { return (x<<n) | (x>>>(32-n)); } /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.RIPEMD160('message'); * var hash = CryptoJS.RIPEMD160(wordArray); */ C.RIPEMD160 = Hasher._createHelper(RIPEMD160); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC.
* * @static * * @example * * var hmac = CryptoJS.HmacRIPEMD160(message, key); */ C.HmacRIPEMD160 = Hasher._createHmacHelper(RIPEMD160); }(Math)); return CryptoJS.RIPEMD160; })); },{"./core":46}],70:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var Hasher = C_lib.Hasher; var C_algo = C.algo; // Reusable object var W = []; /** * SHA-1 hash algorithm. */ var SHA1 = C_algo.SHA1 = Hasher.extend({ _doReset: function () { this._hash = new WordArray.init([ 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0 ]); }, _doProcessBlock: function (M, offset) { // Shortcut var H = this._hash.words; // Working variables var a = H[0]; var b = H[1]; var c = H[2]; var d = H[3]; var e = H[4]; // Computation for (var i = 0; i < 80; i++) { if (i < 16) { W[i] = M[offset + i] | 0; } else { var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]; W[i] = (n << 1) | (n >>> 31); } var t = ((a << 5) | (a >>> 27)) + e + W[i]; if (i < 20) { t += ((b & c) | (~b & d)) + 0x5a827999; } else if (i < 40) { t += (b ^ c ^ d) + 0x6ed9eba1; } else if (i < 60) { t += ((b & c) | (b & d) | (c & d)) - 0x70e44324; } else /* if (i < 80) */ { t += (b ^ c ^ d) - 0x359d3e2a; } e = d; d = c; c = (b << 30) | (b >>> 2); b = a; a = t; } // Intermediate hash value H[0] = (H[0] + a) | 0; H[1] = (H[1] + b) | 0; H[2] = (H[2] + c) | 0; H[3] = (H[3] + d) | 0; H[4] = (H[4] + e) | 0; }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; // Add padding dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; data.sigBytes = dataWords.length * 4; // Hash final blocks this._process(); // Return final computed hash return this._hash; }, clone: function () { var clone = Hasher.clone.call(this); clone._hash = this._hash.clone(); return clone; } }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA1('message'); * var hash = CryptoJS.SHA1(wordArray); */ C.SHA1 = Hasher._createHelper(SHA1); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. 
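 *
 * A hedged note: a string key is interpreted as UTF-8 text; to supply raw
 * key bytes, parse them explicitly first, for example:
 *
 *     var rawKey = CryptoJS.enc.Hex.parse('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b');
 *     var hmac = CryptoJS.HmacSHA1('message', rawKey);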
* * @static * * @example * * var hmac = CryptoJS.HmacSHA1(message, key); */ C.HmacSHA1 = Hasher._createHmacHelper(SHA1); }()); return CryptoJS.SHA1; })); },{"./core":46}],71:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./sha256")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./sha256"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var C_algo = C.algo; var SHA256 = C_algo.SHA256; /** * SHA-224 hash algorithm. */ var SHA224 = C_algo.SHA224 = SHA256.extend({ _doReset: function () { this._hash = new WordArray.init([ 0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4 ]); }, _doFinalize: function () { var hash = SHA256._doFinalize.call(this); hash.sigBytes -= 4; return hash; } }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA224('message'); * var hash = CryptoJS.SHA224(wordArray); */ C.SHA224 = SHA256._createHelper(SHA224); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. * * @static * * @example * * var hmac = CryptoJS.HmacSHA224(message, key); */ C.HmacSHA224 = SHA256._createHmacHelper(SHA224); }()); return CryptoJS.SHA224; })); },{"./core":46,"./sha256":72}],72:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function (Math) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var Hasher = C_lib.Hasher; var C_algo = C.algo; // Initialization and round constants tables var H = []; var K = []; // Compute constants (function () { function isPrime(n) { var sqrtN = Math.sqrt(n); for (var factor = 2; factor <= sqrtN; factor++) { if (!(n % factor)) { return false; } } return true; } function getFractionalBits(n) { return ((n - (n | 0)) * 0x100000000) | 0; } var n = 2; var nPrime = 0; while (nPrime < 64) { if (isPrime(n)) { if (nPrime < 8) { H[nPrime] = getFractionalBits(Math.pow(n, 1 / 2)); } K[nPrime] = getFractionalBits(Math.pow(n, 1 / 3)); nPrime++; } n++; } }()); // Reusable object var W = []; /** * SHA-256 hash algorithm. 
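 *
 * Besides the one-shot CryptoJS.SHA256 helper below, the algorithm object
 * supports incremental hashing (a sketch using the standard
 * create/update/finalize progressive API):
 *
 *     var hasher = CryptoJS.algo.SHA256.create();
 *     hasher.update('part one');
 *     hasher.update('part two');
 *     var digest = hasher.finalize();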
*/ var SHA256 = C_algo.SHA256 = Hasher.extend({ _doReset: function () { this._hash = new WordArray.init(H.slice(0)); }, _doProcessBlock: function (M, offset) { // Shortcut var H = this._hash.words; // Working variables var a = H[0]; var b = H[1]; var c = H[2]; var d = H[3]; var e = H[4]; var f = H[5]; var g = H[6]; var h = H[7]; // Computation for (var i = 0; i < 64; i++) { if (i < 16) { W[i] = M[offset + i] | 0; } else { var gamma0x = W[i - 15]; var gamma0 = ((gamma0x << 25) | (gamma0x >>> 7)) ^ ((gamma0x << 14) | (gamma0x >>> 18)) ^ (gamma0x >>> 3); var gamma1x = W[i - 2]; var gamma1 = ((gamma1x << 15) | (gamma1x >>> 17)) ^ ((gamma1x << 13) | (gamma1x >>> 19)) ^ (gamma1x >>> 10); W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16]; } var ch = (e & f) ^ (~e & g); var maj = (a & b) ^ (a & c) ^ (b & c); var sigma0 = ((a << 30) | (a >>> 2)) ^ ((a << 19) | (a >>> 13)) ^ ((a << 10) | (a >>> 22)); var sigma1 = ((e << 26) | (e >>> 6)) ^ ((e << 21) | (e >>> 11)) ^ ((e << 7) | (e >>> 25)); var t1 = h + sigma1 + ch + K[i] + W[i]; var t2 = sigma0 + maj; h = g; g = f; f = e; e = (d + t1) | 0; d = c; c = b; b = a; a = (t1 + t2) | 0; } // Intermediate hash value H[0] = (H[0] + a) | 0; H[1] = (H[1] + b) | 0; H[2] = (H[2] + c) | 0; H[3] = (H[3] + d) | 0; H[4] = (H[4] + e) | 0; H[5] = (H[5] + f) | 0; H[6] = (H[6] + g) | 0; H[7] = (H[7] + h) | 0; }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; // Add padding dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; data.sigBytes = dataWords.length * 4; // Hash final blocks this._process(); // Return final computed hash return this._hash; }, clone: function () { var clone = Hasher.clone.call(this); clone._hash = this._hash.clone(); return clone; } }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA256('message'); * var hash = CryptoJS.SHA256(wordArray); */ C.SHA256 = Hasher._createHelper(SHA256); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. 
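 *
 * A hedged usage note: the returned WordArray stringifies to hex by default,
 * so a hex-encoded tag can be produced directly:
 *
 *     var tagHex = CryptoJS.HmacSHA256('message', 'secret key').toString();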
* * @static * * @example * * var hmac = CryptoJS.HmacSHA256(message, key); */ C.HmacSHA256 = Hasher._createHmacHelper(SHA256); }(Math)); return CryptoJS.SHA256; })); },{"./core":46}],73:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./x64-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./x64-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function (Math) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var Hasher = C_lib.Hasher; var C_x64 = C.x64; var X64Word = C_x64.Word; var C_algo = C.algo; // Constants tables var RHO_OFFSETS = []; var PI_INDEXES = []; var ROUND_CONSTANTS = []; // Compute Constants (function () { // Compute rho offset constants var x = 1, y = 0; for (var t = 0; t < 24; t++) { RHO_OFFSETS[x + 5 * y] = ((t + 1) * (t + 2) / 2) % 64; var newX = y % 5; var newY = (2 * x + 3 * y) % 5; x = newX; y = newY; } // Compute pi index constants for (var x = 0; x < 5; x++) { for (var y = 0; y < 5; y++) { PI_INDEXES[x + 5 * y] = y + ((2 * x + 3 * y) % 5) * 5; } } // Compute round constants var LFSR = 0x01; for (var i = 0; i < 24; i++) { var roundConstantMsw = 0; var roundConstantLsw = 0; for (var j = 0; j < 7; j++) { if (LFSR & 0x01) { var bitPosition = (1 << j) - 1; if (bitPosition < 32) { roundConstantLsw ^= 1 << bitPosition; } else /* if (bitPosition >= 32) */ { roundConstantMsw ^= 1 << (bitPosition - 32); } } // Compute next LFSR if (LFSR & 0x80) { // Primitive polynomial over GF(2): x^8 + x^6 + x^5 + x^4 + 1 LFSR = (LFSR << 1) ^ 0x71; } else { LFSR <<= 1; } } ROUND_CONSTANTS[i] = X64Word.create(roundConstantMsw, roundConstantLsw); } }()); // Reusable objects for temporary values var T = []; (function () { for (var i = 0; i < 25; i++) { T[i] = X64Word.create(); } }()); /** * SHA-3 hash algorithm. */ var SHA3 = C_algo.SHA3 = Hasher.extend({ /** * Configuration options. * * @property {number} outputLength * The desired number of bits in the output hash. * Only values permitted are: 224, 256, 384, 512. 
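 *
 * A minimal sketch of selecting a shorter digest through the helper's cfg
 * argument (assuming the CryptoJS.SHA3 helper created below):
 *
 *     var hash256 = CryptoJS.SHA3('message', { outputLength: 256 });
 *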
* Default: 512 */ cfg: Hasher.cfg.extend({ outputLength: 512 }), _doReset: function () { var state = this._state = [] for (var i = 0; i < 25; i++) { state[i] = new X64Word.init(); } this.blockSize = (1600 - 2 * this.cfg.outputLength) / 32; }, _doProcessBlock: function (M, offset) { // Shortcuts var state = this._state; var nBlockSizeLanes = this.blockSize / 2; // Absorb for (var i = 0; i < nBlockSizeLanes; i++) { // Shortcuts var M2i = M[offset + 2 * i]; var M2i1 = M[offset + 2 * i + 1]; // Swap endian M2i = ( (((M2i << 8) | (M2i >>> 24)) & 0x00ff00ff) | (((M2i << 24) | (M2i >>> 8)) & 0xff00ff00) ); M2i1 = ( (((M2i1 << 8) | (M2i1 >>> 24)) & 0x00ff00ff) | (((M2i1 << 24) | (M2i1 >>> 8)) & 0xff00ff00) ); // Absorb message into state var lane = state[i]; lane.high ^= M2i1; lane.low ^= M2i; } // Rounds for (var round = 0; round < 24; round++) { // Theta for (var x = 0; x < 5; x++) { // Mix column lanes var tMsw = 0, tLsw = 0; for (var y = 0; y < 5; y++) { var lane = state[x + 5 * y]; tMsw ^= lane.high; tLsw ^= lane.low; } // Temporary values var Tx = T[x]; Tx.high = tMsw; Tx.low = tLsw; } for (var x = 0; x < 5; x++) { // Shortcuts var Tx4 = T[(x + 4) % 5]; var Tx1 = T[(x + 1) % 5]; var Tx1Msw = Tx1.high; var Tx1Lsw = Tx1.low; // Mix surrounding columns var tMsw = Tx4.high ^ ((Tx1Msw << 1) | (Tx1Lsw >>> 31)); var tLsw = Tx4.low ^ ((Tx1Lsw << 1) | (Tx1Msw >>> 31)); for (var y = 0; y < 5; y++) { var lane = state[x + 5 * y]; lane.high ^= tMsw; lane.low ^= tLsw; } } // Rho Pi for (var laneIndex = 1; laneIndex < 25; laneIndex++) { // Shortcuts var lane = state[laneIndex]; var laneMsw = lane.high; var laneLsw = lane.low; var rhoOffset = RHO_OFFSETS[laneIndex]; // Rotate lanes if (rhoOffset < 32) { var tMsw = (laneMsw << rhoOffset) | (laneLsw >>> (32 - rhoOffset)); var tLsw = (laneLsw << rhoOffset) | (laneMsw >>> (32 - rhoOffset)); } else /* if (rhoOffset >= 32) */ { var tMsw = (laneLsw << (rhoOffset - 32)) | (laneMsw >>> (64 - rhoOffset)); var tLsw = (laneMsw << (rhoOffset - 32)) | (laneLsw >>> (64 - rhoOffset)); } // Transpose lanes var TPiLane = T[PI_INDEXES[laneIndex]]; TPiLane.high = tMsw; TPiLane.low = tLsw; } // Rho pi at x = y = 0 var T0 = T[0]; var state0 = state[0]; T0.high = state0.high; T0.low = state0.low; // Chi for (var x = 0; x < 5; x++) { for (var y = 0; y < 5; y++) { // Shortcuts var laneIndex = x + 5 * y; var lane = state[laneIndex]; var TLane = T[laneIndex]; var Tx1Lane = T[((x + 1) % 5) + 5 * y]; var Tx2Lane = T[((x + 2) % 5) + 5 * y]; // Mix rows lane.high = TLane.high ^ (~Tx1Lane.high & Tx2Lane.high); lane.low = TLane.low ^ (~Tx1Lane.low & Tx2Lane.low); } } // Iota var lane = state[0]; var roundConstant = ROUND_CONSTANTS[round]; lane.high ^= roundConstant.high; lane.low ^= roundConstant.low;; } }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; var blockSizeBits = this.blockSize * 32; // Add padding dataWords[nBitsLeft >>> 5] |= 0x1 << (24 - nBitsLeft % 32); dataWords[((Math.ceil((nBitsLeft + 1) / blockSizeBits) * blockSizeBits) >>> 5) - 1] |= 0x80; data.sigBytes = dataWords.length * 4; // Hash final blocks this._process(); // Shortcuts var state = this._state; var outputLengthBytes = this.cfg.outputLength / 8; var outputLengthLanes = outputLengthBytes / 8; // Squeeze var hashWords = []; for (var i = 0; i < outputLengthLanes; i++) { // Shortcuts var lane = state[i]; var laneMsw = lane.high; var laneLsw = lane.low; // Swap endian laneMsw = ( (((laneMsw << 8) | 
(laneMsw >>> 24)) & 0x00ff00ff) | (((laneMsw << 24) | (laneMsw >>> 8)) & 0xff00ff00) ); laneLsw = ( (((laneLsw << 8) | (laneLsw >>> 24)) & 0x00ff00ff) | (((laneLsw << 24) | (laneLsw >>> 8)) & 0xff00ff00) ); // Squeeze state to retrieve hash hashWords.push(laneLsw); hashWords.push(laneMsw); } // Return final computed hash return new WordArray.init(hashWords, outputLengthBytes); }, clone: function () { var clone = Hasher.clone.call(this); var state = clone._state = this._state.slice(0); for (var i = 0; i < 25; i++) { state[i] = state[i].clone(); } return clone; } }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA3('message'); * var hash = CryptoJS.SHA3(wordArray); */ C.SHA3 = Hasher._createHelper(SHA3); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. * * @static * * @example * * var hmac = CryptoJS.HmacSHA3(message, key); */ C.HmacSHA3 = Hasher._createHmacHelper(SHA3); }(Math)); return CryptoJS.SHA3; })); },{"./core":46,"./x64-core":77}],74:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./x64-core"), _dereq_("./sha512")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./x64-core", "./sha512"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_x64 = C.x64; var X64Word = C_x64.Word; var X64WordArray = C_x64.WordArray; var C_algo = C.algo; var SHA512 = C_algo.SHA512; /** * SHA-384 hash algorithm. */ var SHA384 = C_algo.SHA384 = SHA512.extend({ _doReset: function () { this._hash = new X64WordArray.init([ new X64Word.init(0xcbbb9d5d, 0xc1059ed8), new X64Word.init(0x629a292a, 0x367cd507), new X64Word.init(0x9159015a, 0x3070dd17), new X64Word.init(0x152fecd8, 0xf70e5939), new X64Word.init(0x67332667, 0xffc00b31), new X64Word.init(0x8eb44a87, 0x68581511), new X64Word.init(0xdb0c2e0d, 0x64f98fa7), new X64Word.init(0x47b5481d, 0xbefa4fa4) ]); }, _doFinalize: function () { var hash = SHA512._doFinalize.call(this); hash.sigBytes -= 16; return hash; } }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA384('message'); * var hash = CryptoJS.SHA384(wordArray); */ C.SHA384 = SHA512._createHelper(SHA384); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. 
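 *
 * A hedged note: SHA-384 output is 48 bytes (SHA-512 truncated via the
 * sigBytes adjustment above), so the hex-encoded tag is 96 characters:
 *
 *     var tagHex = CryptoJS.HmacSHA384('message', 'secret key').toString();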
* * @static * * @example * * var hmac = CryptoJS.HmacSHA384(message, key); */ C.HmacSHA384 = SHA512._createHmacHelper(SHA384); }()); return CryptoJS.SHA384; })); },{"./core":46,"./sha512":75,"./x64-core":77}],75:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./x64-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./x64-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Hasher = C_lib.Hasher; var C_x64 = C.x64; var X64Word = C_x64.Word; var X64WordArray = C_x64.WordArray; var C_algo = C.algo; function X64Word_create() { return X64Word.create.apply(X64Word, arguments); } // Constants var K = [ X64Word_create(0x428a2f98, 0xd728ae22), X64Word_create(0x71374491, 0x23ef65cd), X64Word_create(0xb5c0fbcf, 0xec4d3b2f), X64Word_create(0xe9b5dba5, 0x8189dbbc), X64Word_create(0x3956c25b, 0xf348b538), X64Word_create(0x59f111f1, 0xb605d019), X64Word_create(0x923f82a4, 0xaf194f9b), X64Word_create(0xab1c5ed5, 0xda6d8118), X64Word_create(0xd807aa98, 0xa3030242), X64Word_create(0x12835b01, 0x45706fbe), X64Word_create(0x243185be, 0x4ee4b28c), X64Word_create(0x550c7dc3, 0xd5ffb4e2), X64Word_create(0x72be5d74, 0xf27b896f), X64Word_create(0x80deb1fe, 0x3b1696b1), X64Word_create(0x9bdc06a7, 0x25c71235), X64Word_create(0xc19bf174, 0xcf692694), X64Word_create(0xe49b69c1, 0x9ef14ad2), X64Word_create(0xefbe4786, 0x384f25e3), X64Word_create(0x0fc19dc6, 0x8b8cd5b5), X64Word_create(0x240ca1cc, 0x77ac9c65), X64Word_create(0x2de92c6f, 0x592b0275), X64Word_create(0x4a7484aa, 0x6ea6e483), X64Word_create(0x5cb0a9dc, 0xbd41fbd4), X64Word_create(0x76f988da, 0x831153b5), X64Word_create(0x983e5152, 0xee66dfab), X64Word_create(0xa831c66d, 0x2db43210), X64Word_create(0xb00327c8, 0x98fb213f), X64Word_create(0xbf597fc7, 0xbeef0ee4), X64Word_create(0xc6e00bf3, 0x3da88fc2), X64Word_create(0xd5a79147, 0x930aa725), X64Word_create(0x06ca6351, 0xe003826f), X64Word_create(0x14292967, 0x0a0e6e70), X64Word_create(0x27b70a85, 0x46d22ffc), X64Word_create(0x2e1b2138, 0x5c26c926), X64Word_create(0x4d2c6dfc, 0x5ac42aed), X64Word_create(0x53380d13, 0x9d95b3df), X64Word_create(0x650a7354, 0x8baf63de), X64Word_create(0x766a0abb, 0x3c77b2a8), X64Word_create(0x81c2c92e, 0x47edaee6), X64Word_create(0x92722c85, 0x1482353b), X64Word_create(0xa2bfe8a1, 0x4cf10364), X64Word_create(0xa81a664b, 0xbc423001), X64Word_create(0xc24b8b70, 0xd0f89791), X64Word_create(0xc76c51a3, 0x0654be30), X64Word_create(0xd192e819, 0xd6ef5218), X64Word_create(0xd6990624, 0x5565a910), X64Word_create(0xf40e3585, 0x5771202a), X64Word_create(0x106aa070, 0x32bbd1b8), X64Word_create(0x19a4c116, 0xb8d2d0c8), X64Word_create(0x1e376c08, 0x5141ab53), X64Word_create(0x2748774c, 0xdf8eeb99), X64Word_create(0x34b0bcb5, 0xe19b48a8), X64Word_create(0x391c0cb3, 0xc5c95a63), X64Word_create(0x4ed8aa4a, 0xe3418acb), X64Word_create(0x5b9cca4f, 0x7763e373), X64Word_create(0x682e6ff3, 0xd6b2b8a3), X64Word_create(0x748f82ee, 0x5defb2fc), X64Word_create(0x78a5636f, 0x43172f60), X64Word_create(0x84c87814, 0xa1f0ab72), X64Word_create(0x8cc70208, 0x1a6439ec), X64Word_create(0x90befffa, 0x23631e28), X64Word_create(0xa4506ceb, 0xde82bde9), X64Word_create(0xbef9a3f7, 0xb2c67915), X64Word_create(0xc67178f2, 0xe372532b), X64Word_create(0xca273ece, 0xea26619c), X64Word_create(0xd186b8c7, 0x21c0c207), 
X64Word_create(0xeada7dd6, 0xcde0eb1e), X64Word_create(0xf57d4f7f, 0xee6ed178), X64Word_create(0x06f067aa, 0x72176fba), X64Word_create(0x0a637dc5, 0xa2c898a6), X64Word_create(0x113f9804, 0xbef90dae), X64Word_create(0x1b710b35, 0x131c471b), X64Word_create(0x28db77f5, 0x23047d84), X64Word_create(0x32caab7b, 0x40c72493), X64Word_create(0x3c9ebe0a, 0x15c9bebc), X64Word_create(0x431d67c4, 0x9c100d4c), X64Word_create(0x4cc5d4be, 0xcb3e42b6), X64Word_create(0x597f299c, 0xfc657e2a), X64Word_create(0x5fcb6fab, 0x3ad6faec), X64Word_create(0x6c44198c, 0x4a475817) ]; // Reusable objects var W = []; (function () { for (var i = 0; i < 80; i++) { W[i] = X64Word_create(); } }()); /** * SHA-512 hash algorithm. */ var SHA512 = C_algo.SHA512 = Hasher.extend({ _doReset: function () { this._hash = new X64WordArray.init([ new X64Word.init(0x6a09e667, 0xf3bcc908), new X64Word.init(0xbb67ae85, 0x84caa73b), new X64Word.init(0x3c6ef372, 0xfe94f82b), new X64Word.init(0xa54ff53a, 0x5f1d36f1), new X64Word.init(0x510e527f, 0xade682d1), new X64Word.init(0x9b05688c, 0x2b3e6c1f), new X64Word.init(0x1f83d9ab, 0xfb41bd6b), new X64Word.init(0x5be0cd19, 0x137e2179) ]); }, _doProcessBlock: function (M, offset) { // Shortcuts var H = this._hash.words; var H0 = H[0]; var H1 = H[1]; var H2 = H[2]; var H3 = H[3]; var H4 = H[4]; var H5 = H[5]; var H6 = H[6]; var H7 = H[7]; var H0h = H0.high; var H0l = H0.low; var H1h = H1.high; var H1l = H1.low; var H2h = H2.high; var H2l = H2.low; var H3h = H3.high; var H3l = H3.low; var H4h = H4.high; var H4l = H4.low; var H5h = H5.high; var H5l = H5.low; var H6h = H6.high; var H6l = H6.low; var H7h = H7.high; var H7l = H7.low; // Working variables var ah = H0h; var al = H0l; var bh = H1h; var bl = H1l; var ch = H2h; var cl = H2l; var dh = H3h; var dl = H3l; var eh = H4h; var el = H4l; var fh = H5h; var fl = H5l; var gh = H6h; var gl = H6l; var hh = H7h; var hl = H7l; // Rounds for (var i = 0; i < 80; i++) { // Shortcut var Wi = W[i]; // Extend message if (i < 16) { var Wih = Wi.high = M[offset + i * 2] | 0; var Wil = Wi.low = M[offset + i * 2 + 1] | 0; } else { // Gamma0 var gamma0x = W[i - 15]; var gamma0xh = gamma0x.high; var gamma0xl = gamma0x.low; var gamma0h = ((gamma0xh >>> 1) | (gamma0xl << 31)) ^ ((gamma0xh >>> 8) | (gamma0xl << 24)) ^ (gamma0xh >>> 7); var gamma0l = ((gamma0xl >>> 1) | (gamma0xh << 31)) ^ ((gamma0xl >>> 8) | (gamma0xh << 24)) ^ ((gamma0xl >>> 7) | (gamma0xh << 25)); // Gamma1 var gamma1x = W[i - 2]; var gamma1xh = gamma1x.high; var gamma1xl = gamma1x.low; var gamma1h = ((gamma1xh >>> 19) | (gamma1xl << 13)) ^ ((gamma1xh << 3) | (gamma1xl >>> 29)) ^ (gamma1xh >>> 6); var gamma1l = ((gamma1xl >>> 19) | (gamma1xh << 13)) ^ ((gamma1xl << 3) | (gamma1xh >>> 29)) ^ ((gamma1xl >>> 6) | (gamma1xh << 26)); // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16] var Wi7 = W[i - 7]; var Wi7h = Wi7.high; var Wi7l = Wi7.low; var Wi16 = W[i - 16]; var Wi16h = Wi16.high; var Wi16l = Wi16.low; var Wil = gamma0l + Wi7l; var Wih = gamma0h + Wi7h + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0); var Wil = Wil + gamma1l; var Wih = Wih + gamma1h + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0); var Wil = Wil + Wi16l; var Wih = Wih + Wi16h + ((Wil >>> 0) < (Wi16l >>> 0) ? 
1 : 0); Wi.high = Wih; Wi.low = Wil; } var chh = (eh & fh) ^ (~eh & gh); var chl = (el & fl) ^ (~el & gl); var majh = (ah & bh) ^ (ah & ch) ^ (bh & ch); var majl = (al & bl) ^ (al & cl) ^ (bl & cl); var sigma0h = ((ah >>> 28) | (al << 4)) ^ ((ah << 30) | (al >>> 2)) ^ ((ah << 25) | (al >>> 7)); var sigma0l = ((al >>> 28) | (ah << 4)) ^ ((al << 30) | (ah >>> 2)) ^ ((al << 25) | (ah >>> 7)); var sigma1h = ((eh >>> 14) | (el << 18)) ^ ((eh >>> 18) | (el << 14)) ^ ((eh << 23) | (el >>> 9)); var sigma1l = ((el >>> 14) | (eh << 18)) ^ ((el >>> 18) | (eh << 14)) ^ ((el << 23) | (eh >>> 9)); // t1 = h + sigma1 + ch + K[i] + W[i] var Ki = K[i]; var Kih = Ki.high; var Kil = Ki.low; var t1l = hl + sigma1l; var t1h = hh + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 1 : 0); var t1l = t1l + chl; var t1h = t1h + chh + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0); var t1l = t1l + Kil; var t1h = t1h + Kih + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0); var t1l = t1l + Wil; var t1h = t1h + Wih + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0); // t2 = sigma0 + maj var t2l = sigma0l + majl; var t2h = sigma0h + majh + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0); // Update working variables hh = gh; hl = gl; gh = fh; gl = fl; fh = eh; fl = el; el = (dl + t1l) | 0; eh = (dh + t1h + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0; dh = ch; dl = cl; ch = bh; cl = bl; bh = ah; bl = al; al = (t1l + t2l) | 0; ah = (t1h + t2h + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0; } // Intermediate hash value H0l = H0.low = (H0l + al); H0.high = (H0h + ah + ((H0l >>> 0) < (al >>> 0) ? 1 : 0)); H1l = H1.low = (H1l + bl); H1.high = (H1h + bh + ((H1l >>> 0) < (bl >>> 0) ? 1 : 0)); H2l = H2.low = (H2l + cl); H2.high = (H2h + ch + ((H2l >>> 0) < (cl >>> 0) ? 1 : 0)); H3l = H3.low = (H3l + dl); H3.high = (H3h + dh + ((H3l >>> 0) < (dl >>> 0) ? 1 : 0)); H4l = H4.low = (H4l + el); H4.high = (H4h + eh + ((H4l >>> 0) < (el >>> 0) ? 1 : 0)); H5l = H5.low = (H5l + fl); H5.high = (H5h + fh + ((H5l >>> 0) < (fl >>> 0) ? 1 : 0)); H6l = H6.low = (H6l + gl); H6.high = (H6h + gh + ((H6l >>> 0) < (gl >>> 0) ? 1 : 0)); H7l = H7.low = (H7l + hl); H7.high = (H7h + hh + ((H7l >>> 0) < (hl >>> 0) ? 1 : 0)); }, _doFinalize: function () { // Shortcuts var data = this._data; var dataWords = data.words; var nBitsTotal = this._nDataBytes * 8; var nBitsLeft = data.sigBytes * 8; // Add padding dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 30] = Math.floor(nBitsTotal / 0x100000000); dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 31] = nBitsTotal; data.sigBytes = dataWords.length * 4; // Hash final blocks this._process(); // Convert hash to 32-bit word array before returning var hash = this._hash.toX32(); // Return final computed hash return hash; }, clone: function () { var clone = Hasher.clone.call(this); clone._hash = this._hash.clone(); return clone; }, blockSize: 1024/32 }); /** * Shortcut function to the hasher's object interface. * * @param {WordArray|string} message The message to hash. * * @return {WordArray} The hash. * * @static * * @example * * var hash = CryptoJS.SHA512('message'); * var hash = CryptoJS.SHA512(wordArray); */ C.SHA512 = Hasher._createHelper(SHA512); /** * Shortcut function to the HMAC's object interface. * * @param {WordArray|string} message The message to hash. * @param {WordArray|string} key The secret key. * * @return {WordArray} The HMAC. 
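 *
 * A hedged usage note: an alternative encoder bundled elsewhere in this file
 * can be applied when stringifying the digest, for example Base64:
 *
 *     var tagB64 = CryptoJS.HmacSHA512('message', 'secret key').toString(CryptoJS.enc.Base64);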
* * @static * * @example * * var hmac = CryptoJS.HmacSHA512(message, key); */ C.HmacSHA512 = Hasher._createHmacHelper(SHA512); }()); return CryptoJS.SHA512; })); },{"./core":46,"./x64-core":77}],76:[function(_dereq_,module,exports){ ;(function (root, factory, undef) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core"), _dereq_("./enc-base64"), _dereq_("./md5"), _dereq_("./evpkdf"), _dereq_("./cipher-core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function () { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var WordArray = C_lib.WordArray; var BlockCipher = C_lib.BlockCipher; var C_algo = C.algo; // Permuted Choice 1 constants var PC1 = [ 57, 49, 41, 33, 25, 17, 9, 1, 58, 50, 42, 34, 26, 18, 10, 2, 59, 51, 43, 35, 27, 19, 11, 3, 60, 52, 44, 36, 63, 55, 47, 39, 31, 23, 15, 7, 62, 54, 46, 38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21, 13, 5, 28, 20, 12, 4 ]; // Permuted Choice 2 constants var PC2 = [ 14, 17, 11, 24, 1, 5, 3, 28, 15, 6, 21, 10, 23, 19, 12, 4, 26, 8, 16, 7, 27, 20, 13, 2, 41, 52, 31, 37, 47, 55, 30, 40, 51, 45, 33, 48, 44, 49, 39, 56, 34, 53, 46, 42, 50, 36, 29, 32 ]; // Cumulative bit shift constants var BIT_SHIFTS = [1, 2, 4, 6, 8, 10, 12, 14, 15, 17, 19, 21, 23, 25, 27, 28]; // SBOXes and round permutation constants var SBOX_P = [ { 0x0: 0x808200, 0x10000000: 0x8000, 0x20000000: 0x808002, 0x30000000: 0x2, 0x40000000: 0x200, 0x50000000: 0x808202, 0x60000000: 0x800202, 0x70000000: 0x800000, 0x80000000: 0x202, 0x90000000: 0x800200, 0xa0000000: 0x8200, 0xb0000000: 0x808000, 0xc0000000: 0x8002, 0xd0000000: 0x800002, 0xe0000000: 0x0, 0xf0000000: 0x8202, 0x8000000: 0x0, 0x18000000: 0x808202, 0x28000000: 0x8202, 0x38000000: 0x8000, 0x48000000: 0x808200, 0x58000000: 0x200, 0x68000000: 0x808002, 0x78000000: 0x2, 0x88000000: 0x800200, 0x98000000: 0x8200, 0xa8000000: 0x808000, 0xb8000000: 0x800202, 0xc8000000: 0x800002, 0xd8000000: 0x8002, 0xe8000000: 0x202, 0xf8000000: 0x800000, 0x1: 0x8000, 0x10000001: 0x2, 0x20000001: 0x808200, 0x30000001: 0x800000, 0x40000001: 0x808002, 0x50000001: 0x8200, 0x60000001: 0x200, 0x70000001: 0x800202, 0x80000001: 0x808202, 0x90000001: 0x808000, 0xa0000001: 0x800002, 0xb0000001: 0x8202, 0xc0000001: 0x202, 0xd0000001: 0x800200, 0xe0000001: 0x8002, 0xf0000001: 0x0, 0x8000001: 0x808202, 0x18000001: 0x808000, 0x28000001: 0x800000, 0x38000001: 0x200, 0x48000001: 0x8000, 0x58000001: 0x800002, 0x68000001: 0x2, 0x78000001: 0x8202, 0x88000001: 0x8002, 0x98000001: 0x800202, 0xa8000001: 0x202, 0xb8000001: 0x808200, 0xc8000001: 0x800200, 0xd8000001: 0x0, 0xe8000001: 0x8200, 0xf8000001: 0x808002 }, { 0x0: 0x40084010, 0x1000000: 0x4000, 0x2000000: 0x80000, 0x3000000: 0x40080010, 0x4000000: 0x40000010, 0x5000000: 0x40084000, 0x6000000: 0x40004000, 0x7000000: 0x10, 0x8000000: 0x84000, 0x9000000: 0x40004010, 0xa000000: 0x40000000, 0xb000000: 0x84010, 0xc000000: 0x80010, 0xd000000: 0x0, 0xe000000: 0x4010, 0xf000000: 0x40080000, 0x800000: 0x40004000, 0x1800000: 0x84010, 0x2800000: 0x10, 0x3800000: 0x40004010, 0x4800000: 0x40084010, 0x5800000: 0x40000000, 0x6800000: 0x80000, 0x7800000: 0x40080010, 0x8800000: 0x80010, 0x9800000: 0x0, 0xa800000: 0x4000, 0xb800000: 0x40080000, 0xc800000: 0x40000010, 0xd800000: 0x84000, 0xe800000: 0x40084000, 0xf800000: 0x4010, 0x10000000: 0x0, 0x11000000: 0x40080010, 0x12000000: 0x40004010, 0x13000000: 
0x40084000, 0x14000000: 0x40080000, 0x15000000: 0x10, 0x16000000: 0x84010, 0x17000000: 0x4000, 0x18000000: 0x4010, 0x19000000: 0x80000, 0x1a000000: 0x80010, 0x1b000000: 0x40000010, 0x1c000000: 0x84000, 0x1d000000: 0x40004000, 0x1e000000: 0x40000000, 0x1f000000: 0x40084010, 0x10800000: 0x84010, 0x11800000: 0x80000, 0x12800000: 0x40080000, 0x13800000: 0x4000, 0x14800000: 0x40004000, 0x15800000: 0x40084010, 0x16800000: 0x10, 0x17800000: 0x40000000, 0x18800000: 0x40084000, 0x19800000: 0x40000010, 0x1a800000: 0x40004010, 0x1b800000: 0x80010, 0x1c800000: 0x0, 0x1d800000: 0x4010, 0x1e800000: 0x40080010, 0x1f800000: 0x84000 }, { 0x0: 0x104, 0x100000: 0x0, 0x200000: 0x4000100, 0x300000: 0x10104, 0x400000: 0x10004, 0x500000: 0x4000004, 0x600000: 0x4010104, 0x700000: 0x4010000, 0x800000: 0x4000000, 0x900000: 0x4010100, 0xa00000: 0x10100, 0xb00000: 0x4010004, 0xc00000: 0x4000104, 0xd00000: 0x10000, 0xe00000: 0x4, 0xf00000: 0x100, 0x80000: 0x4010100, 0x180000: 0x4010004, 0x280000: 0x0, 0x380000: 0x4000100, 0x480000: 0x4000004, 0x580000: 0x10000, 0x680000: 0x10004, 0x780000: 0x104, 0x880000: 0x4, 0x980000: 0x100, 0xa80000: 0x4010000, 0xb80000: 0x10104, 0xc80000: 0x10100, 0xd80000: 0x4000104, 0xe80000: 0x4010104, 0xf80000: 0x4000000, 0x1000000: 0x4010100, 0x1100000: 0x10004, 0x1200000: 0x10000, 0x1300000: 0x4000100, 0x1400000: 0x100, 0x1500000: 0x4010104, 0x1600000: 0x4000004, 0x1700000: 0x0, 0x1800000: 0x4000104, 0x1900000: 0x4000000, 0x1a00000: 0x4, 0x1b00000: 0x10100, 0x1c00000: 0x4010000, 0x1d00000: 0x104, 0x1e00000: 0x10104, 0x1f00000: 0x4010004, 0x1080000: 0x4000000, 0x1180000: 0x104, 0x1280000: 0x4010100, 0x1380000: 0x0, 0x1480000: 0x10004, 0x1580000: 0x4000100, 0x1680000: 0x100, 0x1780000: 0x4010004, 0x1880000: 0x10000, 0x1980000: 0x4010104, 0x1a80000: 0x10104, 0x1b80000: 0x4000004, 0x1c80000: 0x4000104, 0x1d80000: 0x4010000, 0x1e80000: 0x4, 0x1f80000: 0x10100 }, { 0x0: 0x80401000, 0x10000: 0x80001040, 0x20000: 0x401040, 0x30000: 0x80400000, 0x40000: 0x0, 0x50000: 0x401000, 0x60000: 0x80000040, 0x70000: 0x400040, 0x80000: 0x80000000, 0x90000: 0x400000, 0xa0000: 0x40, 0xb0000: 0x80001000, 0xc0000: 0x80400040, 0xd0000: 0x1040, 0xe0000: 0x1000, 0xf0000: 0x80401040, 0x8000: 0x80001040, 0x18000: 0x40, 0x28000: 0x80400040, 0x38000: 0x80001000, 0x48000: 0x401000, 0x58000: 0x80401040, 0x68000: 0x0, 0x78000: 0x80400000, 0x88000: 0x1000, 0x98000: 0x80401000, 0xa8000: 0x400000, 0xb8000: 0x1040, 0xc8000: 0x80000000, 0xd8000: 0x400040, 0xe8000: 0x401040, 0xf8000: 0x80000040, 0x100000: 0x400040, 0x110000: 0x401000, 0x120000: 0x80000040, 0x130000: 0x0, 0x140000: 0x1040, 0x150000: 0x80400040, 0x160000: 0x80401000, 0x170000: 0x80001040, 0x180000: 0x80401040, 0x190000: 0x80000000, 0x1a0000: 0x80400000, 0x1b0000: 0x401040, 0x1c0000: 0x80001000, 0x1d0000: 0x400000, 0x1e0000: 0x40, 0x1f0000: 0x1000, 0x108000: 0x80400000, 0x118000: 0x80401040, 0x128000: 0x0, 0x138000: 0x401000, 0x148000: 0x400040, 0x158000: 0x80000000, 0x168000: 0x80001040, 0x178000: 0x40, 0x188000: 0x80000040, 0x198000: 0x1000, 0x1a8000: 0x80001000, 0x1b8000: 0x80400040, 0x1c8000: 0x1040, 0x1d8000: 0x80401000, 0x1e8000: 0x400000, 0x1f8000: 0x401040 }, { 0x0: 0x80, 0x1000: 0x1040000, 0x2000: 0x40000, 0x3000: 0x20000000, 0x4000: 0x20040080, 0x5000: 0x1000080, 0x6000: 0x21000080, 0x7000: 0x40080, 0x8000: 0x1000000, 0x9000: 0x20040000, 0xa000: 0x20000080, 0xb000: 0x21040080, 0xc000: 0x21040000, 0xd000: 0x0, 0xe000: 0x1040080, 0xf000: 0x21000000, 0x800: 0x1040080, 0x1800: 0x21000080, 0x2800: 0x80, 0x3800: 0x1040000, 0x4800: 0x40000, 0x5800: 0x20040080, 
0x6800: 0x21040000, 0x7800: 0x20000000, 0x8800: 0x20040000, 0x9800: 0x0, 0xa800: 0x21040080, 0xb800: 0x1000080, 0xc800: 0x20000080, 0xd800: 0x21000000, 0xe800: 0x1000000, 0xf800: 0x40080, 0x10000: 0x40000, 0x11000: 0x80, 0x12000: 0x20000000, 0x13000: 0x21000080, 0x14000: 0x1000080, 0x15000: 0x21040000, 0x16000: 0x20040080, 0x17000: 0x1000000, 0x18000: 0x21040080, 0x19000: 0x21000000, 0x1a000: 0x1040000, 0x1b000: 0x20040000, 0x1c000: 0x40080, 0x1d000: 0x20000080, 0x1e000: 0x0, 0x1f000: 0x1040080, 0x10800: 0x21000080, 0x11800: 0x1000000, 0x12800: 0x1040000, 0x13800: 0x20040080, 0x14800: 0x20000000, 0x15800: 0x1040080, 0x16800: 0x80, 0x17800: 0x21040000, 0x18800: 0x40080, 0x19800: 0x21040080, 0x1a800: 0x0, 0x1b800: 0x21000000, 0x1c800: 0x1000080, 0x1d800: 0x40000, 0x1e800: 0x20040000, 0x1f800: 0x20000080 }, { 0x0: 0x10000008, 0x100: 0x2000, 0x200: 0x10200000, 0x300: 0x10202008, 0x400: 0x10002000, 0x500: 0x200000, 0x600: 0x200008, 0x700: 0x10000000, 0x800: 0x0, 0x900: 0x10002008, 0xa00: 0x202000, 0xb00: 0x8, 0xc00: 0x10200008, 0xd00: 0x202008, 0xe00: 0x2008, 0xf00: 0x10202000, 0x80: 0x10200000, 0x180: 0x10202008, 0x280: 0x8, 0x380: 0x200000, 0x480: 0x202008, 0x580: 0x10000008, 0x680: 0x10002000, 0x780: 0x2008, 0x880: 0x200008, 0x980: 0x2000, 0xa80: 0x10002008, 0xb80: 0x10200008, 0xc80: 0x0, 0xd80: 0x10202000, 0xe80: 0x202000, 0xf80: 0x10000000, 0x1000: 0x10002000, 0x1100: 0x10200008, 0x1200: 0x10202008, 0x1300: 0x2008, 0x1400: 0x200000, 0x1500: 0x10000000, 0x1600: 0x10000008, 0x1700: 0x202000, 0x1800: 0x202008, 0x1900: 0x0, 0x1a00: 0x8, 0x1b00: 0x10200000, 0x1c00: 0x2000, 0x1d00: 0x10002008, 0x1e00: 0x10202000, 0x1f00: 0x200008, 0x1080: 0x8, 0x1180: 0x202000, 0x1280: 0x200000, 0x1380: 0x10000008, 0x1480: 0x10002000, 0x1580: 0x2008, 0x1680: 0x10202008, 0x1780: 0x10200000, 0x1880: 0x10202000, 0x1980: 0x10200008, 0x1a80: 0x2000, 0x1b80: 0x202008, 0x1c80: 0x200008, 0x1d80: 0x0, 0x1e80: 0x10000000, 0x1f80: 0x10002008 }, { 0x0: 0x100000, 0x10: 0x2000401, 0x20: 0x400, 0x30: 0x100401, 0x40: 0x2100401, 0x50: 0x0, 0x60: 0x1, 0x70: 0x2100001, 0x80: 0x2000400, 0x90: 0x100001, 0xa0: 0x2000001, 0xb0: 0x2100400, 0xc0: 0x2100000, 0xd0: 0x401, 0xe0: 0x100400, 0xf0: 0x2000000, 0x8: 0x2100001, 0x18: 0x0, 0x28: 0x2000401, 0x38: 0x2100400, 0x48: 0x100000, 0x58: 0x2000001, 0x68: 0x2000000, 0x78: 0x401, 0x88: 0x100401, 0x98: 0x2000400, 0xa8: 0x2100000, 0xb8: 0x100001, 0xc8: 0x400, 0xd8: 0x2100401, 0xe8: 0x1, 0xf8: 0x100400, 0x100: 0x2000000, 0x110: 0x100000, 0x120: 0x2000401, 0x130: 0x2100001, 0x140: 0x100001, 0x150: 0x2000400, 0x160: 0x2100400, 0x170: 0x100401, 0x180: 0x401, 0x190: 0x2100401, 0x1a0: 0x100400, 0x1b0: 0x1, 0x1c0: 0x0, 0x1d0: 0x2100000, 0x1e0: 0x2000001, 0x1f0: 0x400, 0x108: 0x100400, 0x118: 0x2000401, 0x128: 0x2100001, 0x138: 0x1, 0x148: 0x2000000, 0x158: 0x100000, 0x168: 0x401, 0x178: 0x2100400, 0x188: 0x2000001, 0x198: 0x2100000, 0x1a8: 0x0, 0x1b8: 0x2100401, 0x1c8: 0x100401, 0x1d8: 0x400, 0x1e8: 0x2000400, 0x1f8: 0x100001 }, { 0x0: 0x8000820, 0x1: 0x20000, 0x2: 0x8000000, 0x3: 0x20, 0x4: 0x20020, 0x5: 0x8020820, 0x6: 0x8020800, 0x7: 0x800, 0x8: 0x8020000, 0x9: 0x8000800, 0xa: 0x20800, 0xb: 0x8020020, 0xc: 0x820, 0xd: 0x0, 0xe: 0x8000020, 0xf: 0x20820, 0x80000000: 0x800, 0x80000001: 0x8020820, 0x80000002: 0x8000820, 0x80000003: 0x8000000, 0x80000004: 0x8020000, 0x80000005: 0x20800, 0x80000006: 0x20820, 0x80000007: 0x20, 0x80000008: 0x8000020, 0x80000009: 0x820, 0x8000000a: 0x20020, 0x8000000b: 0x8020800, 0x8000000c: 0x0, 0x8000000d: 0x8020020, 0x8000000e: 0x8000800, 0x8000000f: 0x20000, 0x10: 
0x20820, 0x11: 0x8020800, 0x12: 0x20, 0x13: 0x800, 0x14: 0x8000800, 0x15: 0x8000020, 0x16: 0x8020020, 0x17: 0x20000, 0x18: 0x0, 0x19: 0x20020, 0x1a: 0x8020000, 0x1b: 0x8000820, 0x1c: 0x8020820, 0x1d: 0x20800, 0x1e: 0x820, 0x1f: 0x8000000, 0x80000010: 0x20000, 0x80000011: 0x800, 0x80000012: 0x8020020, 0x80000013: 0x20820, 0x80000014: 0x20, 0x80000015: 0x8020000, 0x80000016: 0x8000000, 0x80000017: 0x8000820, 0x80000018: 0x8020820, 0x80000019: 0x8000020, 0x8000001a: 0x8000800, 0x8000001b: 0x0, 0x8000001c: 0x20800, 0x8000001d: 0x820, 0x8000001e: 0x20020, 0x8000001f: 0x8020800 } ]; // Masks that select the SBOX input var SBOX_MASK = [ 0xf8000001, 0x1f800000, 0x01f80000, 0x001f8000, 0x0001f800, 0x00001f80, 0x000001f8, 0x8000001f ]; /** * DES block cipher algorithm. */ var DES = C_algo.DES = BlockCipher.extend({ _doReset: function () { // Shortcuts var key = this._key; var keyWords = key.words; // Select 56 bits according to PC1 var keyBits = []; for (var i = 0; i < 56; i++) { var keyBitPos = PC1[i] - 1; keyBits[i] = (keyWords[keyBitPos >>> 5] >>> (31 - keyBitPos % 32)) & 1; } // Assemble 16 subkeys var subKeys = this._subKeys = []; for (var nSubKey = 0; nSubKey < 16; nSubKey++) { // Create subkey var subKey = subKeys[nSubKey] = []; // Shortcut var bitShift = BIT_SHIFTS[nSubKey]; // Select 48 bits according to PC2 for (var i = 0; i < 24; i++) { // Select from the left 28 key bits subKey[(i / 6) | 0] |= keyBits[((PC2[i] - 1) + bitShift) % 28] << (31 - i % 6); // Select from the right 28 key bits subKey[4 + ((i / 6) | 0)] |= keyBits[28 + (((PC2[i + 24] - 1) + bitShift) % 28)] << (31 - i % 6); } // Since each subkey is applied to an expanded 32-bit input, // the subkey can be broken into 8 values scaled to 32-bits, // which allows the key to be used without expansion subKey[0] = (subKey[0] << 1) | (subKey[0] >>> 31); for (var i = 1; i < 7; i++) { subKey[i] = subKey[i] >>> ((i - 1) * 4 + 3); } subKey[7] = (subKey[7] << 5) | (subKey[7] >>> 27); } // Compute inverse subkeys var invSubKeys = this._invSubKeys = []; for (var i = 0; i < 16; i++) { invSubKeys[i] = subKeys[15 - i]; } }, encryptBlock: function (M, offset) { this._doCryptBlock(M, offset, this._subKeys); }, decryptBlock: function (M, offset) { this._doCryptBlock(M, offset, this._invSubKeys); }, _doCryptBlock: function (M, offset, subKeys) { // Get input this._lBlock = M[offset]; this._rBlock = M[offset + 1]; // Initial permutation exchangeLR.call(this, 4, 0x0f0f0f0f); exchangeLR.call(this, 16, 0x0000ffff); exchangeRL.call(this, 2, 0x33333333); exchangeRL.call(this, 8, 0x00ff00ff); exchangeLR.call(this, 1, 0x55555555); // Rounds for (var round = 0; round < 16; round++) { // Shortcuts var subKey = subKeys[round]; var lBlock = this._lBlock; var rBlock = this._rBlock; // Feistel function var f = 0; for (var i = 0; i < 8; i++) { f |= SBOX_P[i][((rBlock ^ subKey[i]) & SBOX_MASK[i]) >>> 0]; } this._lBlock = rBlock; this._rBlock = lBlock ^ f; } // Undo swap from last round var t = this._lBlock; this._lBlock = this._rBlock; this._rBlock = t; // Final permutation exchangeLR.call(this, 1, 0x55555555); exchangeRL.call(this, 8, 0x00ff00ff); exchangeRL.call(this, 2, 0x33333333); exchangeLR.call(this, 16, 0x0000ffff); exchangeLR.call(this, 4, 0x0f0f0f0f); // Set output M[offset] = this._lBlock; M[offset + 1] = this._rBlock; }, keySize: 64/32, ivSize: 64/32, blockSize: 64/32 }); // Swap bits across the left and right words function exchangeLR(offset, mask) { var t = ((this._lBlock >>> offset) ^ this._rBlock) & mask; this._rBlock ^= t; this._lBlock ^= t << 
offset; } function exchangeRL(offset, mask) { var t = ((this._rBlock >>> offset) ^ this._lBlock) & mask; this._lBlock ^= t; this._rBlock ^= t << offset; } /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.DES.encrypt(message, key, cfg); * var plaintext = CryptoJS.DES.decrypt(ciphertext, key, cfg); */ C.DES = BlockCipher._createHelper(DES); /** * Triple-DES block cipher algorithm. */ var TripleDES = C_algo.TripleDES = BlockCipher.extend({ _doReset: function () { // Shortcuts var key = this._key; var keyWords = key.words; // Create DES instances this._des1 = DES.createEncryptor(WordArray.create(keyWords.slice(0, 2))); this._des2 = DES.createEncryptor(WordArray.create(keyWords.slice(2, 4))); this._des3 = DES.createEncryptor(WordArray.create(keyWords.slice(4, 6))); }, encryptBlock: function (M, offset) { this._des1.encryptBlock(M, offset); this._des2.decryptBlock(M, offset); this._des3.encryptBlock(M, offset); }, decryptBlock: function (M, offset) { this._des3.decryptBlock(M, offset); this._des2.encryptBlock(M, offset); this._des1.decryptBlock(M, offset); }, keySize: 192/32, ivSize: 64/32, blockSize: 64/32 }); /** * Shortcut functions to the cipher's object interface. * * @example * * var ciphertext = CryptoJS.TripleDES.encrypt(message, key, cfg); * var plaintext = CryptoJS.TripleDES.decrypt(ciphertext, key, cfg); */ C.TripleDES = BlockCipher._createHelper(TripleDES); }()); return CryptoJS.TripleDES; })); },{"./cipher-core":45,"./core":46,"./enc-base64":47,"./evpkdf":49,"./md5":54}],77:[function(_dereq_,module,exports){ ;(function (root, factory) { if (typeof exports === "object") { // CommonJS module.exports = exports = factory(_dereq_("./core")); } else if (typeof define === "function" && define.amd) { // AMD define(["./core"], factory); } else { // Global (browser) factory(root.CryptoJS); } }(this, function (CryptoJS) { (function (undefined) { // Shortcuts var C = CryptoJS; var C_lib = C.lib; var Base = C_lib.Base; var X32WordArray = C_lib.WordArray; /** * x64 namespace. */ var C_x64 = C.x64 = {}; /** * A 64-bit word. */ var X64Word = C_x64.Word = Base.extend({ /** * Initializes a newly created 64-bit word. * * @param {number} high The high 32 bits. * @param {number} low The low 32 bits. * * @example * * var x64Word = CryptoJS.x64.Word.create(0x00010203, 0x04050607); */ init: function (high, low) { this.high = high; this.low = low; } /** * Bitwise NOTs this word. * * @return {X64Word} A new x64-Word object after negating. * * @example * * var negated = x64Word.not(); */ // not: function () { // var high = ~this.high; // var low = ~this.low; // return X64Word.create(high, low); // }, /** * Bitwise ANDs this word with the passed word. * * @param {X64Word} word The x64-Word to AND with this word. * * @return {X64Word} A new x64-Word object after ANDing. * * @example * * var anded = x64Word.and(anotherX64Word); */ // and: function (word) { // var high = this.high & word.high; // var low = this.low & word.low; // return X64Word.create(high, low); // }, /** * Bitwise ORs this word with the passed word. * * @param {X64Word} word The x64-Word to OR with this word. * * @return {X64Word} A new x64-Word object after ORing. * * @example * * var ored = x64Word.or(anotherX64Word); */ // or: function (word) { // var high = this.high | word.high; // var low = this.low | word.low; // return X64Word.create(high, low); // }, /** * Bitwise XORs this word with the passed word. * * @param {X64Word} word The x64-Word to XOR with this word. 
* * @return {X64Word} A new x64-Word object after XORing. * * @example * * var xored = x64Word.xor(anotherX64Word); */ // xor: function (word) { // var high = this.high ^ word.high; // var low = this.low ^ word.low; // return X64Word.create(high, low); // }, /** * Shifts this word n bits to the left. * * @param {number} n The number of bits to shift. * * @return {X64Word} A new x64-Word object after shifting. * * @example * * var shifted = x64Word.shiftL(25); */ // shiftL: function (n) { // if (n < 32) { // var high = (this.high << n) | (this.low >>> (32 - n)); // var low = this.low << n; // } else { // var high = this.low << (n - 32); // var low = 0; // } // return X64Word.create(high, low); // }, /** * Shifts this word n bits to the right. * * @param {number} n The number of bits to shift. * * @return {X64Word} A new x64-Word object after shifting. * * @example * * var shifted = x64Word.shiftR(7); */ // shiftR: function (n) { // if (n < 32) { // var low = (this.low >>> n) | (this.high << (32 - n)); // var high = this.high >>> n; // } else { // var low = this.high >>> (n - 32); // var high = 0; // } // return X64Word.create(high, low); // }, /** * Rotates this word n bits to the left. * * @param {number} n The number of bits to rotate. * * @return {X64Word} A new x64-Word object after rotating. * * @example * * var rotated = x64Word.rotL(25); */ // rotL: function (n) { // return this.shiftL(n).or(this.shiftR(64 - n)); // }, /** * Rotates this word n bits to the right. * * @param {number} n The number of bits to rotate. * * @return {X64Word} A new x64-Word object after rotating. * * @example * * var rotated = x64Word.rotR(7); */ // rotR: function (n) { // return this.shiftR(n).or(this.shiftL(64 - n)); // }, /** * Adds this word with the passed word. * * @param {X64Word} word The x64-Word to add with this word. * * @return {X64Word} A new x64-Word object after adding. * * @example * * var added = x64Word.add(anotherX64Word); */ // add: function (word) { // var low = (this.low + word.low) | 0; // var carry = (low >>> 0) < (this.low >>> 0) ? 1 : 0; // var high = (this.high + word.high + carry) | 0; // return X64Word.create(high, low); // } }); /** * An array of 64-bit words. * * @property {Array} words The array of CryptoJS.x64.Word objects. * @property {number} sigBytes The number of significant bytes in this word array. */ var X64WordArray = C_x64.WordArray = Base.extend({ /** * Initializes a newly created word array. * * @param {Array} words (Optional) An array of CryptoJS.x64.Word objects. * @param {number} sigBytes (Optional) The number of significant bytes in the words. * * @example * * var wordArray = CryptoJS.x64.WordArray.create(); * * var wordArray = CryptoJS.x64.WordArray.create([ * CryptoJS.x64.Word.create(0x00010203, 0x04050607), * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) * ]); * * var wordArray = CryptoJS.x64.WordArray.create([ * CryptoJS.x64.Word.create(0x00010203, 0x04050607), * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) * ], 10); */ init: function (words, sigBytes) { words = this.words = words || []; if (sigBytes != undefined) { this.sigBytes = sigBytes; } else { this.sigBytes = words.length * 8; } }, /** * Converts this 64-bit word array to a 32-bit word array. * * @return {CryptoJS.lib.WordArray} This word array's data as a 32-bit word array. 
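 *
 * A minimal sketch (assuming the create() forms documented above): each
 * 64-bit word contributes its high half, then its low half, to the result:
 *
 *     var x64 = CryptoJS.x64.WordArray.create([
 *         CryptoJS.x64.Word.create(0x00010203, 0x04050607)
 *     ]);
 *     var x32 = x64.toX32(); // words: [0x00010203, 0x04050607]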
* * @example * * var x32WordArray = x64WordArray.toX32(); */ toX32: function () { // Shortcuts var x64Words = this.words; var x64WordsLength = x64Words.length; // Convert var x32Words = []; for (var i = 0; i < x64WordsLength; i++) { var x64Word = x64Words[i]; x32Words.push(x64Word.high); x32Words.push(x64Word.low); } return X32WordArray.create(x32Words, this.sigBytes); }, /** * Creates a copy of this word array. * * @return {X64WordArray} The clone. * * @example * * var clone = x64WordArray.clone(); */ clone: function () { var clone = Base.clone.call(this); // Clone "words" array var words = clone.words = this.words.slice(0); // Clone each X64Word object var wordsLength = words.length; for (var i = 0; i < wordsLength; i++) { words[i] = words[i].clone(); } return clone; } }); }()); return CryptoJS; })); },{"./core":46}],78:[function(_dereq_,module,exports){ (function (global){ /*! localForage -- Offline Storage, Improved Version 1.5.5 https://localforage.github.io/localForage (c) 2013-2017 Mozilla, Apache License 2.0 */ (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.localforage = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof _dereq_=="function"&&_dereq_;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw (f.code="MODULE_NOT_FOUND", f)}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof _dereq_=="function"&&_dereq_;for(var o=0;o element; its readystatechange event will be fired asynchronously once it is inserted // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called. var scriptEl = global.document.createElement('script'); scriptEl.onreadystatechange = function () { nextTick(); scriptEl.onreadystatechange = null; scriptEl.parentNode.removeChild(scriptEl); scriptEl = null; }; global.document.documentElement.appendChild(scriptEl); }; } else { scheduleDrain = function () { setTimeout(nextTick, 0); }; } } var draining; var queue = []; //named nextTick for less confusing stack traces function nextTick() { draining = true; var i, oldQueue; var len = queue.length; while (len) { oldQueue = queue; queue = []; i = -1; while (++i < len) { oldQueue[i](); } len = queue.length; } draining = false; } module.exports = immediate; function immediate(task) { if (queue.push(task) === 1 && !draining) { scheduleDrain(); } } }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {}) },{}],2:[function(_dereq_,module,exports){ 'use strict'; var immediate = _dereq_(1); /* istanbul ignore next */ function INTERNAL() {} var handlers = {}; var REJECTED = ['REJECTED']; var FULFILLED = ['FULFILLED']; var PENDING = ['PENDING']; module.exports = exports = Promise; function Promise(resolver) { if (typeof resolver !== 'function') { throw new TypeError('resolver must be a function'); } this.state = PENDING; this.queue = []; this.outcome = void 0; if (resolver !== INTERNAL) { safelyResolveThenable(this, resolver); } } Promise.prototype["catch"] = function (onRejected) { return this.then(null, onRejected); }; Promise.prototype.then = function (onFulfilled, onRejected) { if (typeof onFulfilled !== 'function' && this.state === FULFILLED || typeof onRejected !== 'function' && this.state === REJECTED) { return this; } var promise = new this.constructor(INTERNAL); if (this.state !== PENDING) { var resolver = this.state === FULFILLED ? onFulfilled : onRejected; unwrap(promise, resolver, this.outcome); } else { this.queue.push(new QueueItem(promise, onFulfilled, onRejected)); } return promise; }; function QueueItem(promise, onFulfilled, onRejected) { this.promise = promise; if (typeof onFulfilled === 'function') { this.onFulfilled = onFulfilled; this.callFulfilled = this.otherCallFulfilled; } if (typeof onRejected === 'function') { this.onRejected = onRejected; this.callRejected = this.otherCallRejected; } } QueueItem.prototype.callFulfilled = function (value) { handlers.resolve(this.promise, value); }; QueueItem.prototype.otherCallFulfilled = function (value) { unwrap(this.promise, this.onFulfilled, value); }; QueueItem.prototype.callRejected = function (value) { handlers.reject(this.promise, value); }; QueueItem.prototype.otherCallRejected = function (value) { unwrap(this.promise, this.onRejected, value); }; function unwrap(promise, func, value) { immediate(function () { var returnValue; try { returnValue = func(value); } catch (e) { return handlers.reject(promise, e); } if (returnValue === promise) { handlers.reject(promise, new TypeError('Cannot resolve promise with itself')); } else { handlers.resolve(promise, returnValue); } }); } handlers.resolve = function (self, value) { var result = tryCatch(getThen, value); if (result.status === 'error') { return handlers.reject(self, result.value); } var thenable = result.value; if (thenable) { safelyResolveThenable(self, thenable); } else { self.state = FULFILLED; self.outcome = value; var i = -1; var len = self.queue.length; while (++i < len) { self.queue[i].callFulfilled(value); } } return self; }; handlers.reject = function (self, error) { self.state = REJECTED; self.outcome = error; var i = -1; var len = self.queue.length; while (++i < len) { self.queue[i].callRejected(error); } return self; }; function getThen(obj) { // Make sure we only access the accessor once as required by the spec var then = obj && obj.then; if (obj && typeof obj === 'object' && typeof then === 'function') { return function appyThen() { then.apply(obj, arguments); }; } } function safelyResolveThenable(self, thenable) { // Either fulfill, reject or reject with error var called = false; function onError(value) { if (called) { return; } called = true; handlers.reject(self, value); } function onSuccess(value) { if (called) { return; } called = true; handlers.resolve(self, value); } function tryToUnwrap() { thenable(onSuccess, onError); } var result = tryCatch(tryToUnwrap); if (result.status === 'error') { onError(result.value); } } function tryCatch(func, 
value) { var out = {}; try { out.value = func(value); out.status = 'success'; } catch (e) { out.status = 'error'; out.value = e; } return out; } exports.resolve = resolve; function resolve(value) { if (value instanceof this) { return value; } return handlers.resolve(new this(INTERNAL), value); } exports.reject = reject; function reject(reason) { var promise = new this(INTERNAL); return handlers.reject(promise, reason); } exports.all = all; function all(iterable) { var self = this; if (Object.prototype.toString.call(iterable) !== '[object Array]') { return this.reject(new TypeError('must be an array')); } var len = iterable.length; var called = false; if (!len) { return this.resolve([]); } var values = new Array(len); var resolved = 0; var i = -1; var promise = new this(INTERNAL); while (++i < len) { allResolver(iterable[i], i); } return promise; function allResolver(value, i) { self.resolve(value).then(resolveFromAll, function (error) { if (!called) { called = true; handlers.reject(promise, error); } }); function resolveFromAll(outValue) { values[i] = outValue; if (++resolved === len && !called) { called = true; handlers.resolve(promise, values); } } } } exports.race = race; function race(iterable) { var self = this; if (Object.prototype.toString.call(iterable) !== '[object Array]') { return this.reject(new TypeError('must be an array')); } var len = iterable.length; var called = false; if (!len) { return this.resolve([]); } var i = -1; var promise = new this(INTERNAL); while (++i < len) { resolver(iterable[i]); } return promise; function resolver(value) { self.resolve(value).then(function (response) { if (!called) { called = true; handlers.resolve(promise, response); } }, function (error) { if (!called) { called = true; handlers.reject(promise, error); } }); } } },{"1":1}],3:[function(_dereq_,module,exports){ (function (global){ 'use strict'; if (typeof global.Promise !== 'function') { global.Promise = _dereq_(2); } }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) },{"2":2}],4:[function(_dereq_,module,exports){ 'use strict'; var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function getIDB() { /* global indexedDB,webkitIndexedDB,mozIndexedDB,OIndexedDB,msIndexedDB */ try { if (typeof indexedDB !== 'undefined') { return indexedDB; } if (typeof webkitIndexedDB !== 'undefined') { return webkitIndexedDB; } if (typeof mozIndexedDB !== 'undefined') { return mozIndexedDB; } if (typeof OIndexedDB !== 'undefined') { return OIndexedDB; } if (typeof msIndexedDB !== 'undefined') { return msIndexedDB; } } catch (e) { return; } } var idb = getIDB(); function isIndexedDBValid() { try { // Initialize IndexedDB; fall back to vendor-prefixed versions // if needed. if (!idb) { return false; } // We mimic PouchDB here; // // We test for openDatabase because IE Mobile identifies itself // as Safari. Oh the lulz... 
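// Net effect of the checks below (an illustrative sketch, not exhaustive):
//
//   isIndexedDBValid(); // -> true  on Chrome/Firefox/Edge (indexedDB + IDBKeyRange present)
//   isIndexedDBValid(); // -> true  on Safari >= 10.1, which ships a native fetch()
//   isIndexedDBValid(); // -> false on Safari < 10.1, so localForage falls back to WebSQL
//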
var isSafari = typeof openDatabase !== 'undefined' && /(Safari|iPhone|iPad|iPod)/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent) && !/BlackBerry/.test(navigator.platform); var hasFetch = typeof fetch === 'function' && fetch.toString().indexOf('[native code') !== -1; // Safari <10.1 does not meet our requirements for IDB support (#5572) // since Safari 10.1 shipped with fetch, we can use that to detect it return (!isSafari || hasFetch) && typeof indexedDB !== 'undefined' && // some outdated implementations of IDB that appear on Samsung // and HTC Android devices <4.4 are missing IDBKeyRange // See: https://github.com/mozilla/localForage/issues/128 // See: https://github.com/mozilla/localForage/issues/272 typeof IDBKeyRange !== 'undefined'; } catch (e) { return false; } } // Abstracts constructing a Blob object, so it also works in older // browsers that don't support the native Blob constructor. (i.e. // old QtWebKit versions, at least). // Abstracts constructing a Blob object, so it also works in older // browsers that don't support the native Blob constructor. (i.e. // old QtWebKit versions, at least). function createBlob(parts, properties) { /* global BlobBuilder,MSBlobBuilder,MozBlobBuilder,WebKitBlobBuilder */ parts = parts || []; properties = properties || {}; try { return new Blob(parts, properties); } catch (e) { if (e.name !== 'TypeError') { throw e; } var Builder = typeof BlobBuilder !== 'undefined' ? BlobBuilder : typeof MSBlobBuilder !== 'undefined' ? MSBlobBuilder : typeof MozBlobBuilder !== 'undefined' ? MozBlobBuilder : WebKitBlobBuilder; var builder = new Builder(); for (var i = 0; i < parts.length; i += 1) { builder.append(parts[i]); } return builder.getBlob(properties.type); } } // This is CommonJS because lie is an external dependency, so Rollup // can just ignore it. if (typeof Promise === 'undefined') { // In the "nopromises" build this will just throw if you don't have // a global promise object, but it would throw anyway later. _dereq_(3); } var Promise$1 = Promise; function executeCallback(promise, callback) { if (callback) { promise.then(function (result) { callback(null, result); }, function (error) { callback(error); }); } } function executeTwoCallbacks(promise, callback, errorCallback) { if (typeof callback === 'function') { promise.then(callback); } if (typeof errorCallback === 'function') { promise["catch"](errorCallback); } } function normalizeKey(key) { // Cast the key to a string, as that's all we can set as a key. if (typeof key !== 'string') { console.warn(key + ' used as a key, but it is not a string.'); key = String(key); } return key; } // Some code originally from async_storage.js in // [Gaia](https://github.com/mozilla-b2g/gaia). var DETECT_BLOB_SUPPORT_STORE = 'local-forage-detect-blob-support'; var supportsBlobs; var dbContexts; var toString = Object.prototype.toString; // Transaction Modes var READ_ONLY = 'readonly'; var READ_WRITE = 'readwrite'; // Transform a binary string to an array buffer, because otherwise // weird stuff happens when you try to work with the binary string directly. // It is known. 
// From http://stackoverflow.com/questions/14967647/ (continues on next line) // encode-decode-image-with-base64-breaks-image (2013-04-21) function _binStringToArrayBuffer(bin) { var length = bin.length; var buf = new ArrayBuffer(length); var arr = new Uint8Array(buf); for (var i = 0; i < length; i++) { arr[i] = bin.charCodeAt(i); } return buf; } // // Blobs are not supported in all versions of IndexedDB, notably // Chrome <37 and Android <5. In those versions, storing a blob will throw. // // Various other blob bugs exist in Chrome v37-42 (inclusive). // Detecting them is expensive and confusing to users, and Chrome 37-42 // is at very low usage worldwide, so we do a hacky userAgent check instead. // // content-type bug: https://code.google.com/p/chromium/issues/detail?id=408120 // 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916 // FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836 // // Code borrowed from PouchDB. See: // https://github.com/pouchdb/pouchdb/blob/master/packages/node_modules/pouchdb-adapter-idb/src/blobSupport.js // function _checkBlobSupportWithoutCaching(idb) { return new Promise$1(function (resolve) { var txn = idb.transaction(DETECT_BLOB_SUPPORT_STORE, READ_WRITE); var blob = createBlob(['']); txn.objectStore(DETECT_BLOB_SUPPORT_STORE).put(blob, 'key'); txn.onabort = function (e) { // If the transaction aborts now its due to not being able to // write to the database, likely due to the disk being full e.preventDefault(); e.stopPropagation(); resolve(false); }; txn.oncomplete = function () { var matchedChrome = navigator.userAgent.match(/Chrome\/(\d+)/); var matchedEdge = navigator.userAgent.match(/Edge\//); // MS Edge pretends to be Chrome 42: // https://msdn.microsoft.com/en-us/library/hh869301%28v=vs.85%29.aspx resolve(matchedEdge || !matchedChrome || parseInt(matchedChrome[1], 10) >= 43); }; })["catch"](function () { return false; // error, so assume unsupported }); } function _checkBlobSupport(idb) { if (typeof supportsBlobs === 'boolean') { return Promise$1.resolve(supportsBlobs); } return _checkBlobSupportWithoutCaching(idb).then(function (value) { supportsBlobs = value; return supportsBlobs; }); } function _deferReadiness(dbInfo) { var dbContext = dbContexts[dbInfo.name]; // Create a deferred object representing the current database operation. var deferredOperation = {}; deferredOperation.promise = new Promise$1(function (resolve) { deferredOperation.resolve = resolve; }); // Enqueue the deferred operation. dbContext.deferredOperations.push(deferredOperation); // Chain its promise to the database readiness. if (!dbContext.dbReady) { dbContext.dbReady = deferredOperation.promise; } else { dbContext.dbReady = dbContext.dbReady.then(function () { return deferredOperation.promise; }); } } function _advanceReadiness(dbInfo) { var dbContext = dbContexts[dbInfo.name]; // Dequeue a deferred operation. var deferredOperation = dbContext.deferredOperations.pop(); // Resolve its promise (which is part of the database readiness // chain of promises). if (deferredOperation) { deferredOperation.resolve(); } } function _rejectReadiness(dbInfo, err) { var dbContext = dbContexts[dbInfo.name]; // Dequeue a deferred operation. var deferredOperation = dbContext.deferredOperations.pop(); // Reject its promise (which is part of the database readiness // chain of promises). 
if (deferredOperation) { deferredOperation.reject(err); } } function _getConnection(dbInfo, upgradeNeeded) { return new Promise$1(function (resolve, reject) { if (dbInfo.db) { if (upgradeNeeded) { _deferReadiness(dbInfo); dbInfo.db.close(); } else { return resolve(dbInfo.db); } } var dbArgs = [dbInfo.name]; if (upgradeNeeded) { dbArgs.push(dbInfo.version); } var openreq = idb.open.apply(idb, dbArgs); if (upgradeNeeded) { openreq.onupgradeneeded = function (e) { var db = openreq.result; try { db.createObjectStore(dbInfo.storeName); if (e.oldVersion <= 1) { // Added when support for blob shims was added db.createObjectStore(DETECT_BLOB_SUPPORT_STORE); } } catch (ex) { if (ex.name === 'ConstraintError') { console.warn('The database "' + dbInfo.name + '"' + ' has been upgraded from version ' + e.oldVersion + ' to version ' + e.newVersion + ', but the storage "' + dbInfo.storeName + '" already exists.'); } else { throw ex; } } }; } openreq.onerror = function (e) { e.preventDefault(); reject(openreq.error); }; openreq.onsuccess = function () { resolve(openreq.result); _advanceReadiness(dbInfo); }; }); } function _getOriginalConnection(dbInfo) { return _getConnection(dbInfo, false); } function _getUpgradedConnection(dbInfo) { return _getConnection(dbInfo, true); } function _isUpgradeNeeded(dbInfo, defaultVersion) { if (!dbInfo.db) { return true; } var isNewStore = !dbInfo.db.objectStoreNames.contains(dbInfo.storeName); var isDowngrade = dbInfo.version < dbInfo.db.version; var isUpgrade = dbInfo.version > dbInfo.db.version; if (isDowngrade) { // If the version is not the default one // then warn for impossible downgrade. if (dbInfo.version !== defaultVersion) { console.warn('The database "' + dbInfo.name + '"' + ' can\'t be downgraded from version ' + dbInfo.db.version + ' to version ' + dbInfo.version + '.'); } // Align the versions to prevent errors. dbInfo.version = dbInfo.db.version; } if (isUpgrade || isNewStore) { // If the store is new then increment the version (if needed). // This will trigger an "upgradeneeded" event which is required // for creating a store. if (isNewStore) { var incVersion = dbInfo.db.version + 1; if (incVersion > dbInfo.version) { dbInfo.version = incVersion; } } return true; } return false; } // encode a blob for indexeddb engines that don't support blobs function _encodeBlob(blob) { return new Promise$1(function (resolve, reject) { var reader = new FileReader(); reader.onerror = reject; reader.onloadend = function (e) { var base64 = btoa(e.target.result || ''); resolve({ __local_forage_encoded_blob: true, data: base64, type: blob.type }); }; reader.readAsBinaryString(blob); }); } // decode an encoded blob function _decodeBlob(encodedBlob) { var arrayBuff = _binStringToArrayBuffer(atob(encodedBlob.data)); return createBlob([arrayBuff], { type: encodedBlob.type }); } // is this one of our fancy encoded blobs? function _isEncodedBlob(value) { return value && value.__local_forage_encoded_blob; } // Specialize the default `ready()` function by making it dependent // on the current database operations. Thus, the driver will be actually // ready when it's been initialized (default) *and* there are no pending // operations on the database (initiated by some other instances). 
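// From the calling code's perspective (an illustrative sketch; the instance and
// key names are hypothetical):
//
//   var store = localforage.createInstance({ name: 'app' });
//   store.ready().then(function () {
//     // Resolves only once the driver is initialized *and* any pending
//     // connection/upgrade work started by sibling instances has settled.
//     return store.getItem('profile');
//   });
//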
function _fullyReady(callback) { var self = this; var promise = self._initReady().then(function () { var dbContext = dbContexts[self._dbInfo.name]; if (dbContext && dbContext.dbReady) { return dbContext.dbReady; } }); executeTwoCallbacks(promise, callback, callback); return promise; } // Try to establish a new db connection to replace the // current one which is broken (i.e. experiencing // InvalidStateError while creating a transaction). function _tryReconnect(dbInfo) { _deferReadiness(dbInfo); var dbContext = dbContexts[dbInfo.name]; var forages = dbContext.forages; for (var i = 0; i < forages.length; i++) { if (forages[i]._dbInfo.db) { forages[i]._dbInfo.db.close(); forages[i]._dbInfo.db = null; } } return _getConnection(dbInfo, false).then(function (db) { for (var j = 0; j < forages.length; j++) { forages[j]._dbInfo.db = db; } })["catch"](function (err) { _rejectReadiness(dbInfo, err); throw err; }); } // FF doesn't like Promises (micro-tasks) and IDDB store operations, // so we have to do it with callbacks function createTransaction(dbInfo, mode, callback) { try { var tx = dbInfo.db.transaction(dbInfo.storeName, mode); callback(null, tx); } catch (err) { if (!dbInfo.db || err.name === 'InvalidStateError') { return _tryReconnect(dbInfo).then(function () { var tx = dbInfo.db.transaction(dbInfo.storeName, mode); callback(null, tx); }); } callback(err); } } // Open the IndexedDB database (automatically creates one if one didn't // previously exist), using any options set in the config. function _initStorage(options) { var self = this; var dbInfo = { db: null }; if (options) { for (var i in options) { dbInfo[i] = options[i]; } } // Initialize a singleton container for all running localForages. if (!dbContexts) { dbContexts = {}; } // Get the current context of the database; var dbContext = dbContexts[dbInfo.name]; // ...or create a new context. if (!dbContext) { dbContext = { // Running localForages sharing a database. forages: [], // Shared database. db: null, // Database readiness (promise). dbReady: null, // Deferred operations on the database. deferredOperations: [] }; // Register the new context in the global container. dbContexts[dbInfo.name] = dbContext; } // Register itself as a running localForage in the current context. dbContext.forages.push(self); // Replace the default `ready()` function with the specialized one. if (!self._initReady) { self._initReady = self.ready; self.ready = _fullyReady; } // Create an array of initialization states of the related localForages. var initPromises = []; function ignoreErrors() { // Don't handle errors here, // just makes sure related localForages aren't pending. return Promise$1.resolve(); } for (var j = 0; j < dbContext.forages.length; j++) { var forage = dbContext.forages[j]; if (forage !== self) { // Don't wait for itself... initPromises.push(forage._initReady()["catch"](ignoreErrors)); } } // Take a snapshot of the related localForages. var forages = dbContext.forages.slice(0); // Initialize the connection process only when // all the related localForages aren't pending. return Promise$1.all(initPromises).then(function () { dbInfo.db = dbContext.db; // Get the connection or open a new one without upgrade. return _getOriginalConnection(dbInfo); }).then(function (db) { dbInfo.db = db; if (_isUpgradeNeeded(dbInfo, self._defaultConfig.version)) { // Reopen the database for upgrading. 
return _getUpgradedConnection(dbInfo); } return db; }).then(function (db) { dbInfo.db = dbContext.db = db; self._dbInfo = dbInfo; // Share the final connection amongst related localForages. for (var k = 0; k < forages.length; k++) { var forage = forages[k]; if (forage !== self) { // Self is already up-to-date. forage._dbInfo.db = dbInfo.db; forage._dbInfo.version = dbInfo.version; } } }); } function getItem(key, callback) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_ONLY, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var req = store.get(key); req.onsuccess = function () { var value = req.result; if (value === undefined) { value = null; } if (_isEncodedBlob(value)) { value = _decodeBlob(value); } resolve(value); }; req.onerror = function () { reject(req.error); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } // Iterate over all items stored in database. function iterate(iterator, callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_ONLY, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var req = store.openCursor(); var iterationNumber = 1; req.onsuccess = function () { var cursor = req.result; if (cursor) { var value = cursor.value; if (_isEncodedBlob(value)) { value = _decodeBlob(value); } var result = iterator(value, cursor.key, iterationNumber++); // when the iterator callback retuns any // (non-`undefined`) value, then we stop // the iteration immediately if (result !== void 0) { resolve(result); } else { cursor["continue"](); } } else { resolve(); } }; req.onerror = function () { reject(req.error); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function setItem(key, value, callback) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { var dbInfo; self.ready().then(function () { dbInfo = self._dbInfo; if (toString.call(value) === '[object Blob]') { return _checkBlobSupport(dbInfo.db).then(function (blobSupport) { if (blobSupport) { return value; } return _encodeBlob(value); }); } return value; }).then(function (value) { createTransaction(self._dbInfo, READ_WRITE, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); // The reason we don't _save_ null is because IE 10 does // not support saving the `null` type in IndexedDB. How // ironic, given the bug below! // See: https://github.com/mozilla/localForage/issues/161 if (value === null) { value = undefined; } var req = store.put(value, key); transaction.oncomplete = function () { // Cast to undefined so the value passed to // callback/promise is the same as what one would get out // of `getItem()` later. This leads to some weirdness // (setItem('foo', undefined) will return `null`), but // it's not my fault localStorage is our baseline and that // it's weird. if (value === undefined) { value = null; } resolve(value); }; transaction.onabort = transaction.onerror = function () { var err = req.error ? 
req.error : req.transaction.error; reject(err); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function removeItem(key, callback) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_WRITE, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); // We use a Grunt task to make this safe for IE and some // versions of Android (including those used by Cordova). // Normally IE won't like `['delete']()` and will insist on // using `['delete']()`, but we have a build step that // fixes this for us now. var req = store["delete"](key); transaction.oncomplete = function () { resolve(); }; transaction.onerror = function () { reject(req.error); }; // The request will be also be aborted if we've exceeded our storage // space. transaction.onabort = function () { var err = req.error ? req.error : req.transaction.error; reject(err); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function clear(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_WRITE, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var req = store.clear(); transaction.oncomplete = function () { resolve(); }; transaction.onabort = transaction.onerror = function () { var err = req.error ? req.error : req.transaction.error; reject(err); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function length(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_ONLY, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var req = store.count(); req.onsuccess = function () { resolve(req.result); }; req.onerror = function () { reject(req.error); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function key(n, callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { if (n < 0) { resolve(null); return; } self.ready().then(function () { createTransaction(self._dbInfo, READ_ONLY, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var advanced = false; var req = store.openCursor(); req.onsuccess = function () { var cursor = req.result; if (!cursor) { // this means there weren't enough keys resolve(null); return; } if (n === 0) { // We have the first key, return it if that's what they // wanted. resolve(cursor.key); } else { if (!advanced) { // Otherwise, ask the cursor to skip ahead n // records. advanced = true; cursor.advance(n); } else { // When we get here, we've got the nth key. 
resolve(cursor.key); } } }; req.onerror = function () { reject(req.error); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function keys(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { createTransaction(self._dbInfo, READ_ONLY, function (err, transaction) { if (err) { return reject(err); } try { var store = transaction.objectStore(self._dbInfo.storeName); var req = store.openCursor(); var keys = []; req.onsuccess = function () { var cursor = req.result; if (!cursor) { resolve(keys); return; } keys.push(cursor.key); cursor["continue"](); }; req.onerror = function () { reject(req.error); }; } catch (e) { reject(e); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } var asyncStorage = { _driver: 'asyncStorage', _initStorage: _initStorage, _support: isIndexedDBValid(), iterate: iterate, getItem: getItem, setItem: setItem, removeItem: removeItem, clear: clear, length: length, key: key, keys: keys }; function isWebSQLValid() { return typeof openDatabase === 'function'; } // Sadly, the best way to save binary data in WebSQL/localStorage is serializing // it to Base64, so this is how we store it to prevent very strange errors with less // verbose ways of binary <-> string data storage. var BASE_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; var BLOB_TYPE_PREFIX = '~~local_forage_type~'; var BLOB_TYPE_PREFIX_REGEX = /^~~local_forage_type~([^~]+)~/; var SERIALIZED_MARKER = '__lfsc__:'; var SERIALIZED_MARKER_LENGTH = SERIALIZED_MARKER.length; // OMG the serializations! var TYPE_ARRAYBUFFER = 'arbf'; var TYPE_BLOB = 'blob'; var TYPE_INT8ARRAY = 'si08'; var TYPE_UINT8ARRAY = 'ui08'; var TYPE_UINT8CLAMPEDARRAY = 'uic8'; var TYPE_INT16ARRAY = 'si16'; var TYPE_INT32ARRAY = 'si32'; var TYPE_UINT16ARRAY = 'ur16'; var TYPE_UINT32ARRAY = 'ui32'; var TYPE_FLOAT32ARRAY = 'fl32'; var TYPE_FLOAT64ARRAY = 'fl64'; var TYPE_SERIALIZED_MARKER_LENGTH = SERIALIZED_MARKER_LENGTH + TYPE_ARRAYBUFFER.length; var toString$1 = Object.prototype.toString; function stringToBuffer(serializedString) { // Fill the string into a ArrayBuffer. var bufferLength = serializedString.length * 0.75; var len = serializedString.length; var i; var p = 0; var encoded1, encoded2, encoded3, encoded4; if (serializedString[serializedString.length - 1] === '=') { bufferLength--; if (serializedString[serializedString.length - 2] === '=') { bufferLength--; } } var buffer = new ArrayBuffer(bufferLength); var bytes = new Uint8Array(buffer); for (i = 0; i < len; i += 4) { encoded1 = BASE_CHARS.indexOf(serializedString[i]); encoded2 = BASE_CHARS.indexOf(serializedString[i + 1]); encoded3 = BASE_CHARS.indexOf(serializedString[i + 2]); encoded4 = BASE_CHARS.indexOf(serializedString[i + 3]); /*jslint bitwise: true */ bytes[p++] = encoded1 << 2 | encoded2 >> 4; bytes[p++] = (encoded2 & 15) << 4 | encoded3 >> 2; bytes[p++] = (encoded3 & 3) << 6 | encoded4 & 63; } return buffer; } // Converts a buffer to a string to store, serialized, in the backend // storage library. 
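// Illustrative round trip through the two helpers (a sketch of the expected
// values):
//
//   var buf  = new Uint8Array([104, 105]).buffer;     // bytes of "hi"
//   var str  = bufferToString(buf);                   // "aGk=" (base64)
//   var back = new Uint8Array(stringToBuffer(str));   // Uint8Array [104, 105]
//
// A serialized typed array stored by this driver is just such a base64 string
// prefixed with '__lfsc__:' plus a four-character type code (e.g. 'ui08').
//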
function bufferToString(buffer) { // base64-arraybuffer var bytes = new Uint8Array(buffer); var base64String = ''; var i; for (i = 0; i < bytes.length; i += 3) { /*jslint bitwise: true */ base64String += BASE_CHARS[bytes[i] >> 2]; base64String += BASE_CHARS[(bytes[i] & 3) << 4 | bytes[i + 1] >> 4]; base64String += BASE_CHARS[(bytes[i + 1] & 15) << 2 | bytes[i + 2] >> 6]; base64String += BASE_CHARS[bytes[i + 2] & 63]; } if (bytes.length % 3 === 2) { base64String = base64String.substring(0, base64String.length - 1) + '='; } else if (bytes.length % 3 === 1) { base64String = base64String.substring(0, base64String.length - 2) + '=='; } return base64String; } // Serialize a value, afterwards executing a callback (which usually // instructs the `setItem()` callback/promise to be executed). This is how // we store binary data with localStorage. function serialize(value, callback) { var valueType = ''; if (value) { valueType = toString$1.call(value); } // Cannot use `value instanceof ArrayBuffer` or such here, as these // checks fail when running the tests using casper.js... // // TODO: See why those tests fail and use a better solution. if (value && (valueType === '[object ArrayBuffer]' || value.buffer && toString$1.call(value.buffer) === '[object ArrayBuffer]')) { // Convert binary arrays to a string and prefix the string with // a special marker. var buffer; var marker = SERIALIZED_MARKER; if (value instanceof ArrayBuffer) { buffer = value; marker += TYPE_ARRAYBUFFER; } else { buffer = value.buffer; if (valueType === '[object Int8Array]') { marker += TYPE_INT8ARRAY; } else if (valueType === '[object Uint8Array]') { marker += TYPE_UINT8ARRAY; } else if (valueType === '[object Uint8ClampedArray]') { marker += TYPE_UINT8CLAMPEDARRAY; } else if (valueType === '[object Int16Array]') { marker += TYPE_INT16ARRAY; } else if (valueType === '[object Uint16Array]') { marker += TYPE_UINT16ARRAY; } else if (valueType === '[object Int32Array]') { marker += TYPE_INT32ARRAY; } else if (valueType === '[object Uint32Array]') { marker += TYPE_UINT32ARRAY; } else if (valueType === '[object Float32Array]') { marker += TYPE_FLOAT32ARRAY; } else if (valueType === '[object Float64Array]') { marker += TYPE_FLOAT64ARRAY; } else { callback(new Error('Failed to get type for BinaryArray')); } } callback(marker + bufferToString(buffer)); } else if (valueType === '[object Blob]') { // Conver the blob to a binaryArray and then to a string. var fileReader = new FileReader(); fileReader.onload = function () { // Backwards-compatible prefix for the blob type. var str = BLOB_TYPE_PREFIX + value.type + '~' + bufferToString(this.result); callback(SERIALIZED_MARKER + TYPE_BLOB + str); }; fileReader.readAsArrayBuffer(value); } else { try { callback(JSON.stringify(value)); } catch (e) { console.error("Couldn't convert value into a JSON string: ", value); callback(null, e); } } } // Deserialize data we've inserted into a value column/field. We place // special markers into our strings to mark them as encoded; this isn't // as nice as a meta field, but it's the only sane thing we can do whilst // keeping localStorage support intact. // // Oftentimes this will just deserialize JSON content, but if we have a // special marker (SERIALIZED_MARKER, defined above), we will extract // some kind of arraybuffer/binary data/typed array out of the string. function deserialize(value) { // If we haven't marked this string as being specially serialized (i.e. // something other than serialized JSON), we can just return it and be // done with it. 
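// For example (an illustrative sketch; the second string is what serialize()
// emits for new Uint8Array([0, 1, 2])):
//
//   deserialize('{"a":1}');           // -> { a: 1 }            (plain JSON path)
//   deserialize('__lfsc__:ui08AAEC'); // -> Uint8Array [0, 1, 2] (typed-array path)
//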
if (value.substring(0, SERIALIZED_MARKER_LENGTH) !== SERIALIZED_MARKER) { return JSON.parse(value); } // The following code deals with deserializing some kind of Blob or // TypedArray. First we separate out the type of data we're dealing // with from the data itself. var serializedString = value.substring(TYPE_SERIALIZED_MARKER_LENGTH); var type = value.substring(SERIALIZED_MARKER_LENGTH, TYPE_SERIALIZED_MARKER_LENGTH); var blobType; // Backwards-compatible blob type serialization strategy. // DBs created with older versions of localForage will simply not have the blob type. if (type === TYPE_BLOB && BLOB_TYPE_PREFIX_REGEX.test(serializedString)) { var matcher = serializedString.match(BLOB_TYPE_PREFIX_REGEX); blobType = matcher[1]; serializedString = serializedString.substring(matcher[0].length); } var buffer = stringToBuffer(serializedString); // Return the right type based on the code/type set during // serialization. switch (type) { case TYPE_ARRAYBUFFER: return buffer; case TYPE_BLOB: return createBlob([buffer], { type: blobType }); case TYPE_INT8ARRAY: return new Int8Array(buffer); case TYPE_UINT8ARRAY: return new Uint8Array(buffer); case TYPE_UINT8CLAMPEDARRAY: return new Uint8ClampedArray(buffer); case TYPE_INT16ARRAY: return new Int16Array(buffer); case TYPE_UINT16ARRAY: return new Uint16Array(buffer); case TYPE_INT32ARRAY: return new Int32Array(buffer); case TYPE_UINT32ARRAY: return new Uint32Array(buffer); case TYPE_FLOAT32ARRAY: return new Float32Array(buffer); case TYPE_FLOAT64ARRAY: return new Float64Array(buffer); default: throw new Error('Unkown type: ' + type); } } var localforageSerializer = { serialize: serialize, deserialize: deserialize, stringToBuffer: stringToBuffer, bufferToString: bufferToString }; /* * Includes code from: * * base64-arraybuffer * https://github.com/niklasvh/base64-arraybuffer * * Copyright (c) 2012 Niklas von Hertzen * Licensed under the MIT license. */ // Open the WebSQL database (automatically creates one if one didn't // previously exist), using any options set in the config. function _initStorage$1(options) { var self = this; var dbInfo = { db: null }; if (options) { for (var i in options) { dbInfo[i] = typeof options[i] !== 'string' ? options[i].toString() : options[i]; } } var dbInfoPromise = new Promise$1(function (resolve, reject) { // Open the database; the openDatabase API will automatically // create it for us if it doesn't exist. try { dbInfo.db = openDatabase(dbInfo.name, String(dbInfo.version), dbInfo.description, dbInfo.size); } catch (e) { return reject(e); } // Create our key/value table if it doesn't exist. dbInfo.db.transaction(function (t) { t.executeSql('CREATE TABLE IF NOT EXISTS ' + dbInfo.storeName + ' (id INTEGER PRIMARY KEY, key unique, value)', [], function () { self._dbInfo = dbInfo; resolve(); }, function (t, error) { reject(error); }); }); }); dbInfo.serializer = localforageSerializer; return dbInfoPromise; } function getItem$1(key, callback) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('SELECT * FROM ' + dbInfo.storeName + ' WHERE key = ? LIMIT 1', [key], function (t, results) { var result = results.rows.length ? results.rows.item(0).value : null; // Check to see if this is serialized content we need to // unpack. 
if (result) { result = dbInfo.serializer.deserialize(result); } resolve(result); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function iterate$1(iterator, callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('SELECT * FROM ' + dbInfo.storeName, [], function (t, results) { var rows = results.rows; var length = rows.length; for (var i = 0; i < length; i++) { var item = rows.item(i); var result = item.value; // Check to see if this is serialized content // we need to unpack. if (result) { result = dbInfo.serializer.deserialize(result); } result = iterator(result, item.key, i + 1); // void(0) prevents problems with redefinition // of `undefined`. if (result !== void 0) { resolve(result); return; } } resolve(); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function _setItem(key, value, callback, retriesLeft) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { // The localStorage API doesn't return undefined values in an // "expected" way, so undefined is always cast to null in all // drivers. See: https://github.com/mozilla/localForage/pull/42 if (value === undefined) { value = null; } // Save the original value to pass to the callback. var originalValue = value; var dbInfo = self._dbInfo; dbInfo.serializer.serialize(value, function (value, error) { if (error) { reject(error); } else { dbInfo.db.transaction(function (t) { t.executeSql('INSERT OR REPLACE INTO ' + dbInfo.storeName + ' (key, value) VALUES (?, ?)', [key, value], function () { resolve(originalValue); }, function (t, error) { reject(error); }); }, function (sqlError) { // The transaction failed; check // to see if it's a quota error. if (sqlError.code === sqlError.QUOTA_ERR) { // We reject the callback outright for now, but // it's worth trying to re-run the transaction. // Even if the user accepts the prompt to use // more storage on Safari, this error will // be called. // // Try to re-run the transaction. if (retriesLeft > 0) { resolve(_setItem.apply(self, [key, originalValue, callback, retriesLeft - 1])); return; } reject(sqlError); } }); } }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function setItem$1(key, value, callback) { return _setItem.apply(this, [key, value, callback, 1]); } function removeItem$1(key, callback) { var self = this; key = normalizeKey(key); var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('DELETE FROM ' + dbInfo.storeName + ' WHERE key = ?', [key], function () { resolve(); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } // Deletes every item in the table. // TODO: Find out if this resets the AUTO_INCREMENT number. 
function clear$1(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('DELETE FROM ' + dbInfo.storeName, [], function () { resolve(); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } // Does a simple `COUNT(key)` to get the number of items stored in // localForage. function length$1(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { // Ahhh, SQL makes this one soooooo easy. t.executeSql('SELECT COUNT(key) as c FROM ' + dbInfo.storeName, [], function (t, results) { var result = results.rows.item(0).c; resolve(result); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } // Return the key located at key index X; essentially gets the key from a // `WHERE id = ?`. This is the most efficient way I can think to implement // this rarely-used (in my experience) part of the API, but it can seem // inconsistent, because we do `INSERT OR REPLACE INTO` on `setItem()`, so // the ID of each key will change every time it's updated. Perhaps a stored // procedure for the `setItem()` SQL would solve this problem? // TODO: Don't change ID on `setItem()`. function key$1(n, callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('SELECT key FROM ' + dbInfo.storeName + ' WHERE id = ? LIMIT 1', [n + 1], function (t, results) { var result = results.rows.length ? results.rows.item(0).key : null; resolve(result); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } function keys$1(callback) { var self = this; var promise = new Promise$1(function (resolve, reject) { self.ready().then(function () { var dbInfo = self._dbInfo; dbInfo.db.transaction(function (t) { t.executeSql('SELECT key FROM ' + dbInfo.storeName, [], function (t, results) { var keys = []; for (var i = 0; i < results.rows.length; i++) { keys.push(results.rows.item(i).key); } resolve(keys); }, function (t, error) { reject(error); }); }); })["catch"](reject); }); executeCallback(promise, callback); return promise; } var webSQLStorage = { _driver: 'webSQLStorage', _initStorage: _initStorage$1, _support: isWebSQLValid(), iterate: iterate$1, getItem: getItem$1, setItem: setItem$1, removeItem: removeItem$1, clear: clear$1, length: length$1, key: key$1, keys: keys$1 }; function isLocalStorageValid() { try { return typeof localStorage !== 'undefined' && 'setItem' in localStorage && typeof localStorage.setItem === 'function'; } catch (e) { return false; } } // Check if localStorage throws when saving an item function checkIfLocalStorageThrows() { var localStorageTestKey = '_localforage_support_test'; try { localStorage.setItem(localStorageTestKey, true); localStorage.removeItem(localStorageTestKey); return false; } catch (e) { return true; } } // Check if localStorage is usable and allows to save an item // This method checks if localStorage is usable in Safari Private Browsing // mode, or in any other case where the available quota for localStorage // is 0 and there wasn't any saved items yet. 
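// Illustrative outcomes of the check (hypothetical environments):
//
//   _isLocalStorageUsable(); // -> true  in a normal browsing session
//   _isLocalStorageUsable(); // -> false in old Safari private browsing with an
//                            //    empty store (setItem throws and length === 0)
//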
function _isLocalStorageUsable() { return !checkIfLocalStorageThrows() || localStorage.length > 0; } // Config the localStorage backend, using options set in the config. function _initStorage$2(options) { var self = this; var dbInfo = {}; if (options) { for (var i in options) { dbInfo[i] = options[i]; } } dbInfo.keyPrefix = dbInfo.name + '/'; if (dbInfo.storeName !== self._defaultConfig.storeName) { dbInfo.keyPrefix += dbInfo.storeName + '/'; } if (!_isLocalStorageUsable()) { return Promise$1.reject(); } self._dbInfo = dbInfo; dbInfo.serializer = localforageSerializer; return Promise$1.resolve(); } // Remove all keys from the datastore, effectively destroying all data in // the app's key/value store! function clear$2(callback) { var self = this; var promise = self.ready().then(function () { var keyPrefix = self._dbInfo.keyPrefix; for (var i = localStorage.length - 1; i >= 0; i--) { var key = localStorage.key(i); if (key.indexOf(keyPrefix) === 0) { localStorage.removeItem(key); } } }); executeCallback(promise, callback); return promise; } // Retrieve an item from the store. Unlike the original async_storage // library in Gaia, we don't modify return values at all. If a key's value // is `undefined`, we pass that value to the callback function. function getItem$2(key, callback) { var self = this; key = normalizeKey(key); var promise = self.ready().then(function () { var dbInfo = self._dbInfo; var result = localStorage.getItem(dbInfo.keyPrefix + key); // If a result was found, parse it from the serialized // string into a JS object. If result isn't truthy, the key // is likely undefined and we'll pass it straight to the // callback. if (result) { result = dbInfo.serializer.deserialize(result); } return result; }); executeCallback(promise, callback); return promise; } // Iterate over all items in the store. function iterate$2(iterator, callback) { var self = this; var promise = self.ready().then(function () { var dbInfo = self._dbInfo; var keyPrefix = dbInfo.keyPrefix; var keyPrefixLength = keyPrefix.length; var length = localStorage.length; // We use a dedicated iterator instead of the `i` variable below // so other keys we fetch in localStorage aren't counted in // the `iterationNumber` argument passed to the `iterate()` // callback. // // See: github.com/mozilla/localForage/pull/435#discussion_r38061530 var iterationNumber = 1; for (var i = 0; i < length; i++) { var key = localStorage.key(i); if (key.indexOf(keyPrefix) !== 0) { continue; } var value = localStorage.getItem(key); // If a result was found, parse it from the serialized // string into a JS object. If result isn't truthy, the // key is likely undefined and we'll pass it straight // to the iterator. if (value) { value = dbInfo.serializer.deserialize(value); } value = iterator(value, key.substring(keyPrefixLength), iterationNumber++); if (value !== void 0) { return value; } } }); executeCallback(promise, callback); return promise; } // Same as localStorage's key() method, except takes a callback. function key$2(n, callback) { var self = this; var promise = self.ready().then(function () { var dbInfo = self._dbInfo; var result; try { result = localStorage.key(n); } catch (error) { result = null; } // Remove the prefix from the key, if a key is found. 
if (result) { result = result.substring(dbInfo.keyPrefix.length); } return result; }); executeCallback(promise, callback); return promise; } function keys$2(callback) { var self = this; var promise = self.ready().then(function () { var dbInfo = self._dbInfo; var length = localStorage.length; var keys = []; for (var i = 0; i < length; i++) { var itemKey = localStorage.key(i); if (itemKey.indexOf(dbInfo.keyPrefix) === 0) { keys.push(itemKey.substring(dbInfo.keyPrefix.length)); } } return keys; }); executeCallback(promise, callback); return promise; } // Supply the number of keys in the datastore to the callback function. function length$2(callback) { var self = this; var promise = self.keys().then(function (keys) { return keys.length; }); executeCallback(promise, callback); return promise; } // Remove an item from the store, nice and simple. function removeItem$2(key, callback) { var self = this; key = normalizeKey(key); var promise = self.ready().then(function () { var dbInfo = self._dbInfo; localStorage.removeItem(dbInfo.keyPrefix + key); }); executeCallback(promise, callback); return promise; } // Set a key's value and run an optional callback once the value is set. // Unlike Gaia's implementation, the callback function is passed the value, // in case you want to operate on that value only after you're sure it // saved, or something like that. function setItem$2(key, value, callback) { var self = this; key = normalizeKey(key); var promise = self.ready().then(function () { // Convert undefined values to null. // https://github.com/mozilla/localForage/pull/42 if (value === undefined) { value = null; } // Save the original value to pass to the callback. var originalValue = value; return new Promise$1(function (resolve, reject) { var dbInfo = self._dbInfo; dbInfo.serializer.serialize(value, function (value, error) { if (error) { reject(error); } else { try { localStorage.setItem(dbInfo.keyPrefix + key, value); resolve(originalValue); } catch (e) { // localStorage capacity exceeded. // TODO: Make this a specific error/event. if (e.name === 'QuotaExceededError' || e.name === 'NS_ERROR_DOM_QUOTA_REACHED') { reject(e); } reject(e); } } }); }); }); executeCallback(promise, callback); return promise; } var localStorageWrapper = { _driver: 'localStorageWrapper', _initStorage: _initStorage$2, _support: isLocalStorageValid(), iterate: iterate$2, getItem: getItem$2, setItem: setItem$2, removeItem: removeItem$2, clear: clear$2, length: length$2, key: key$2, keys: keys$2 }; var isArray = Array.isArray || function (arg) { return Object.prototype.toString.call(arg) === '[object Array]'; }; // Drivers are stored here when `defineDriver()` is called. // They are shared across all instances of localForage. var DefinedDrivers = {}; var DriverSupport = {}; var DefaultDrivers = { INDEXEDDB: asyncStorage, WEBSQL: webSQLStorage, LOCALSTORAGE: localStorageWrapper }; var DefaultDriverOrder = [DefaultDrivers.INDEXEDDB._driver, DefaultDrivers.WEBSQL._driver, DefaultDrivers.LOCALSTORAGE._driver]; var LibraryMethods = ['clear', 'getItem', 'iterate', 'key', 'keys', 'length', 'removeItem', 'setItem']; var DefaultConfig = { description: '', driver: DefaultDriverOrder.slice(), name: 'localforage', // Default DB size is _JUST UNDER_ 5MB, as it's the highest size // we can use without a prompt. 
size: 4980736, storeName: 'keyvaluepairs', version: 1.0 }; function callWhenReady(localForageInstance, libraryMethod) { localForageInstance[libraryMethod] = function () { var _args = arguments; return localForageInstance.ready().then(function () { return localForageInstance[libraryMethod].apply(localForageInstance, _args); }); }; } function extend() { for (var i = 1; i < arguments.length; i++) { var arg = arguments[i]; if (arg) { for (var _key in arg) { if (arg.hasOwnProperty(_key)) { if (isArray(arg[_key])) { arguments[0][_key] = arg[_key].slice(); } else { arguments[0][_key] = arg[_key]; } } } } } return arguments[0]; } var LocalForage = function () { function LocalForage(options) { _classCallCheck(this, LocalForage); for (var driverTypeKey in DefaultDrivers) { if (DefaultDrivers.hasOwnProperty(driverTypeKey)) { var driver = DefaultDrivers[driverTypeKey]; var driverName = driver._driver; this[driverTypeKey] = driverName; if (!DefinedDrivers[driverName]) { // we don't need to wait for the promise, // since the default drivers can be defined // in a blocking manner this.defineDriver(driver); } } } this._defaultConfig = extend({}, DefaultConfig); this._config = extend({}, this._defaultConfig, options); this._driverSet = null; this._initDriver = null; this._ready = false; this._dbInfo = null; this._wrapLibraryMethodsWithReady(); this.setDriver(this._config.driver)["catch"](function () {}); } // Set any config values for localForage; can be called anytime before // the first API call (e.g. `getItem`, `setItem`). // We loop through options so we don't overwrite existing config // values. LocalForage.prototype.config = function config(options) { // If the options argument is an object, we use it to set values. // Otherwise, we return either a specified config value or all // config values. if ((typeof options === 'undefined' ? 'undefined' : _typeof(options)) === 'object') { // If localforage is ready and fully initialized, we can't set // any new configuration values. Instead, we return an error. if (this._ready) { return new Error('Can\'t call config() after localforage ' + 'has been used.'); } for (var i in options) { if (i === 'storeName') { options[i] = options[i].replace(/\W/g, '_'); } if (i === 'version' && typeof options[i] !== 'number') { return new Error('Database version must be a number.'); } this._config[i] = options[i]; } // after all config options are set and // the driver option is used, try setting it if ('driver' in options && options.driver) { return this.setDriver(this._config.driver); } return true; } else if (typeof options === 'string') { return this._config[options]; } else { return this._config; } }; // Used to define a custom driver, shared across all instances of // localForage. LocalForage.prototype.defineDriver = function defineDriver(driverObject, callback, errorCallback) { var promise = new Promise$1(function (resolve, reject) { try { var driverName = driverObject._driver; var complianceError = new Error('Custom driver not compliant; see ' + 'https://mozilla.github.io/localForage/#definedriver'); // A driver name should be defined and not overlap with the // library-defined, default drivers. 
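// For reference, a minimally compliant driver object looks roughly like this
// (a hypothetical sketch; real method bodies should return promises like the
// built-in drivers do):
//
//   {
//     _driver: 'myCustomDriver',
//     _initStorage: function (options) { /* ... */ },
//     _support: true, // or a function returning a Promise resolving to a boolean
//     getItem: function () {}, setItem: function () {}, removeItem: function () {},
//     clear: function () {}, length: function () {}, key: function () {},
//     keys: function () {}, iterate: function () {}
//   }
//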
if (!driverObject._driver) { reject(complianceError); return; } var driverMethods = LibraryMethods.concat('_initStorage'); for (var i = 0, len = driverMethods.length; i < len; i++) { var customDriverMethod = driverMethods[i]; if (!customDriverMethod || !driverObject[customDriverMethod] || typeof driverObject[customDriverMethod] !== 'function') { reject(complianceError); return; } } var setDriverSupport = function setDriverSupport(support) { if (DefinedDrivers[driverName]) { console.info('Redefining LocalForage driver: ' + driverName); } DefinedDrivers[driverName] = driverObject; DriverSupport[driverName] = support; // don't use a then, so that we can define // drivers that have simple _support methods // in a blocking manner resolve(); }; if ('_support' in driverObject) { if (driverObject._support && typeof driverObject._support === 'function') { driverObject._support().then(setDriverSupport, reject); } else { setDriverSupport(!!driverObject._support); } } else { setDriverSupport(true); } } catch (e) { reject(e); } }); executeTwoCallbacks(promise, callback, errorCallback); return promise; }; LocalForage.prototype.driver = function driver() { return this._driver || null; }; LocalForage.prototype.getDriver = function getDriver(driverName, callback, errorCallback) { var getDriverPromise = DefinedDrivers[driverName] ? Promise$1.resolve(DefinedDrivers[driverName]) : Promise$1.reject(new Error('Driver not found.')); executeTwoCallbacks(getDriverPromise, callback, errorCallback); return getDriverPromise; }; LocalForage.prototype.getSerializer = function getSerializer(callback) { var serializerPromise = Promise$1.resolve(localforageSerializer); executeTwoCallbacks(serializerPromise, callback); return serializerPromise; }; LocalForage.prototype.ready = function ready(callback) { var self = this; var promise = self._driverSet.then(function () { if (self._ready === null) { self._ready = self._initDriver(); } return self._ready; }); executeTwoCallbacks(promise, callback, callback); return promise; }; LocalForage.prototype.setDriver = function setDriver(drivers, callback, errorCallback) { var self = this; if (!isArray(drivers)) { drivers = [drivers]; } var supportedDrivers = this._getSupportedDrivers(drivers); function setDriverToConfig() { self._config.driver = self.driver(); } function extendSelfWithDriver(driver) { self._extend(driver); setDriverToConfig(); self._ready = self._initStorage(self._config); return self._ready; } function initDriver(supportedDrivers) { return function () { var currentDriverIndex = 0; function driverPromiseLoop() { while (currentDriverIndex < supportedDrivers.length) { var driverName = supportedDrivers[currentDriverIndex]; currentDriverIndex++; self._dbInfo = null; self._ready = null; return self.getDriver(driverName).then(extendSelfWithDriver)["catch"](driverPromiseLoop); } setDriverToConfig(); var error = new Error('No available storage method found.'); self._driverSet = Promise$1.reject(error); return self._driverSet; } return driverPromiseLoop(); }; } // There might be a driver initialization in progress // so wait for it to finish in order to avoid a possible // race condition to set _dbInfo var oldDriverSetDone = this._driverSet !== null ? 
this._driverSet["catch"](function () { return Promise$1.resolve(); }) : Promise$1.resolve(); this._driverSet = oldDriverSetDone.then(function () { var driverName = supportedDrivers[0]; self._dbInfo = null; self._ready = null; return self.getDriver(driverName).then(function (driver) { self._driver = driver._driver; setDriverToConfig(); self._wrapLibraryMethodsWithReady(); self._initDriver = initDriver(supportedDrivers); }); })["catch"](function () { setDriverToConfig(); var error = new Error('No available storage method found.'); self._driverSet = Promise$1.reject(error); return self._driverSet; }); executeTwoCallbacks(this._driverSet, callback, errorCallback); return this._driverSet; }; LocalForage.prototype.supports = function supports(driverName) { return !!DriverSupport[driverName]; }; LocalForage.prototype._extend = function _extend(libraryMethodsAndProperties) { extend(this, libraryMethodsAndProperties); }; LocalForage.prototype._getSupportedDrivers = function _getSupportedDrivers(drivers) { var supportedDrivers = []; for (var i = 0, len = drivers.length; i < len; i++) { var driverName = drivers[i]; if (this.supports(driverName)) { supportedDrivers.push(driverName); } } return supportedDrivers; }; LocalForage.prototype._wrapLibraryMethodsWithReady = function _wrapLibraryMethodsWithReady() { // Add a stub for each driver API method that delays the call to the // corresponding driver method until localForage is ready. These stubs // will be replaced by the driver methods as soon as the driver is // loaded, so there is no performance impact. for (var i = 0, len = LibraryMethods.length; i < len; i++) { callWhenReady(this, LibraryMethods[i]); } }; LocalForage.prototype.createInstance = function createInstance(options) { return new LocalForage(options); }; return LocalForage; }(); // The actual localForage object that we expose as a module or via a // global. It's extended by pulling in one of our other libraries. var localforage_js = new LocalForage(); module.exports = localforage_js; },{"3":3}]},{},[4])(4) }); }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) },{}],79:[function(_dereq_,module,exports){ // Top level file is just a mixin of submodules & constants 'use strict'; var assign = _dereq_('./lib/utils/common').assign; var deflate = _dereq_('./lib/deflate'); var inflate = _dereq_('./lib/inflate'); var constants = _dereq_('./lib/zlib/constants'); var pako = {}; assign(pako, deflate, inflate, constants); module.exports = pako; },{"./lib/deflate":80,"./lib/inflate":81,"./lib/utils/common":82,"./lib/zlib/constants":85}],80:[function(_dereq_,module,exports){ 'use strict'; var zlib_deflate = _dereq_('./zlib/deflate'); var utils = _dereq_('./utils/common'); var strings = _dereq_('./utils/strings'); var msg = _dereq_('./zlib/messages'); var ZStream = _dereq_('./zlib/zstream'); var toString = Object.prototype.toString; /* Public constants ==========================================================*/ /* ===========================================================================*/ var Z_NO_FLUSH = 0; var Z_FINISH = 4; var Z_OK = 0; var Z_STREAM_END = 1; var Z_SYNC_FLUSH = 2; var Z_DEFAULT_COMPRESSION = -1; var Z_DEFAULT_STRATEGY = 0; var Z_DEFLATED = 8; /* ===========================================================================*/ /** * class Deflate * * Generic JS-style wrapper for zlib calls. If you don't need * streaming behaviour - use more simple functions: [[deflate]], * [[deflateRaw]] and [[gzip]]. 
**/ /* internal * Deflate.chunks -> Array * * Chunks of output data, if [[Deflate#onData]] not overridden. **/ /** * Deflate.result -> Uint8Array|Array * * Compressed result, generated by default [[Deflate#onData]] * and [[Deflate#onEnd]] handlers. Filled after you push last chunk * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you * push a chunk with explicit flush (call [[Deflate#push]] with * `Z_SYNC_FLUSH` param). **/ /** * Deflate.err -> Number * * Error code after deflate finished. 0 (Z_OK) on success. * You will not need it in real life, because deflate errors * are possible only on wrong options or bad `onData` / `onEnd` * custom handlers. **/ /** * Deflate.msg -> String * * Error message, if [[Deflate.err]] != 0 **/ /** * new Deflate(options) * - options (Object): zlib deflate options. * * Creates new deflator instance with specified params. Throws exception * on bad params. Supported options: * * - `level` * - `windowBits` * - `memLevel` * - `strategy` * - `dictionary` * * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) * for more information on these. * * Additional options, for internal needs: * * - `chunkSize` - size of generated data chunks (16K by default) * - `raw` (Boolean) - do raw deflate * - `gzip` (Boolean) - create gzip wrapper * - `to` (String) - if equal to 'string', then result will be "binary string" * (each char code [0..255]) * - `header` (Object) - custom header for gzip * - `text` (Boolean) - true if compressed data believed to be text * - `time` (Number) - modification time, unix timestamp * - `os` (Number) - operation system code * - `extra` (Array) - array of bytes with extra data (max 65536) * - `name` (String) - file name (binary string) * - `comment` (String) - comment (binary string) * - `hcrc` (Boolean) - true if header crc should be added * * ##### Example: * * ```javascript * var pako = require('pako') * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); * * var deflate = new pako.Deflate({ level: 3}); * * deflate.push(chunk1, false); * deflate.push(chunk2, true); // true -> last chunk * * if (deflate.err) { throw new Error(deflate.err); } * * console.log(deflate.result); * ``` **/ function Deflate(options) { if (!(this instanceof Deflate)) return new Deflate(options); this.options = utils.assign({ level: Z_DEFAULT_COMPRESSION, method: Z_DEFLATED, chunkSize: 16384, windowBits: 15, memLevel: 8, strategy: Z_DEFAULT_STRATEGY, to: '' }, options || {}); var opt = this.options; if (opt.raw && (opt.windowBits > 0)) { opt.windowBits = -opt.windowBits; } else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { opt.windowBits += 16; } this.err = 0; // error code, if happens (0 = Z_OK) this.msg = ''; // error message this.ended = false; // used to avoid multiple onEnd() calls this.chunks = []; // chunks of compressed data this.strm = new ZStream(); this.strm.avail_out = 0; var status = zlib_deflate.deflateInit2( this.strm, opt.level, opt.method, opt.windowBits, opt.memLevel, opt.strategy ); if (status !== Z_OK) { throw new Error(msg[status]); } if (opt.header) { zlib_deflate.deflateSetHeader(this.strm, opt.header); } if (opt.dictionary) { var dict; // Convert data if needed if (typeof opt.dictionary === 'string') { // If we need to compress text, change encoding to utf8. 
dict = strings.string2buf(opt.dictionary); } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { dict = new Uint8Array(opt.dictionary); } else { dict = opt.dictionary; } status = zlib_deflate.deflateSetDictionary(this.strm, dict); if (status !== Z_OK) { throw new Error(msg[status]); } this._dict_set = true; } } /** * Deflate#push(data[, mode]) -> Boolean * - data (Uint8Array|Array|ArrayBuffer|String): input data. Strings will be * converted to utf8 byte sequence. * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. * * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with * new compressed chunks. Returns `true` on success. The last data block must have * mode Z_FINISH (or `true`). That will flush internal pending buffers and call * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you * can use mode Z_SYNC_FLUSH, keeping the compression context. * * On fail call [[Deflate#onEnd]] with error code and return false. * * We strongly recommend to use `Uint8Array` on input for best speed (output * array format is detected automatically). Also, don't skip last param and always * use the same type in your code (boolean or number). That will improve JS speed. * * For regular `Array`-s make sure all elements are [0..255]. * * ##### Example * * ```javascript * push(chunk, false); // push one of data chunks * ... * push(chunk, true); // push last chunk * ``` **/ Deflate.prototype.push = function (data, mode) { var strm = this.strm; var chunkSize = this.options.chunkSize; var status, _mode; if (this.ended) { return false; } _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); // Convert data if needed if (typeof data === 'string') { // If we need to compress text, change encoding to utf8. strm.input = strings.string2buf(data); } else if (toString.call(data) === '[object ArrayBuffer]') { strm.input = new Uint8Array(data); } else { strm.input = data; } strm.next_in = 0; strm.avail_in = strm.input.length; do { if (strm.avail_out === 0) { strm.output = new utils.Buf8(chunkSize); strm.next_out = 0; strm.avail_out = chunkSize; } status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ if (status !== Z_STREAM_END && status !== Z_OK) { this.onEnd(status); this.ended = true; return false; } if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { if (this.options.to === 'string') { this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); } else { this.onData(utils.shrinkBuf(strm.output, strm.next_out)); } } } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); // Finalize on the last chunk. if (_mode === Z_FINISH) { status = zlib_deflate.deflateEnd(this.strm); this.onEnd(status); this.ended = true; return status === Z_OK; } // callback interim results if Z_SYNC_FLUSH. if (_mode === Z_SYNC_FLUSH) { this.onEnd(Z_OK); strm.avail_out = 0; return true; } return true; }; /** * Deflate#onData(chunk) -> Void * - chunk (Uint8Array|Array|String): output data. Type of array depends * on js engine support. When string output requested, each chunk * will be string. * * By default, stores data blocks in `chunks[]` property and glue * those in `onEnd`. Override this handler, if you need another behaviour. 
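 *
 * A minimal override sketch; `sink` below stands for any consumer you supply
 * (a stream or file writer, for example) and is not part of pako:
 *
 * ```javascript
 * deflate.onData = function (chunk) {
 *   sink.write(chunk); // forward each compressed chunk instead of buffering it
 * };
 * ```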
**/ Deflate.prototype.onData = function (chunk) { this.chunks.push(chunk); }; /** * Deflate#onEnd(status) -> Void * - status (Number): deflate status. 0 (Z_OK) on success, * other if not. * * Called once after you tell deflate that the input stream is * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) * or if an error happened. By default - join collected chunks, * free memory and fill `results` / `err` properties. **/ Deflate.prototype.onEnd = function (status) { // On success - join if (status === Z_OK) { if (this.options.to === 'string') { this.result = this.chunks.join(''); } else { this.result = utils.flattenChunks(this.chunks); } } this.chunks = []; this.err = status; this.msg = this.strm.msg; }; /** * deflate(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to compress. * - options (Object): zlib deflate options. * * Compress `data` with deflate algorithm and `options`. * * Supported options are: * * - level * - windowBits * - memLevel * - strategy * - dictionary * * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) * for more information on these. * * Sugar (options): * * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify * negative windowBits implicitly. * - `to` (String) - if equal to 'string', then result will be "binary string" * (each char code [0..255]) * * ##### Example: * * ```javascript * var pako = require('pako') * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); * * console.log(pako.deflate(data)); * ``` **/ function deflate(input, options) { var deflator = new Deflate(options); deflator.push(input, true); // That will never happens, if you don't cheat with options :) if (deflator.err) { throw deflator.msg || msg[deflator.err]; } return deflator.result; } /** * deflateRaw(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to compress. * - options (Object): zlib deflate options. * * The same as [[deflate]], but creates raw data, without wrapper * (header and adler32 crc). **/ function deflateRaw(input, options) { options = options || {}; options.raw = true; return deflate(input, options); } /** * gzip(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to compress. * - options (Object): zlib deflate options. * * The same as [[deflate]], but create gzip wrapper instead of * deflate one. **/ function gzip(input, options) { options = options || {}; options.gzip = true; return deflate(input, options); } exports.Deflate = Deflate; exports.deflate = deflate; exports.deflateRaw = deflateRaw; exports.gzip = gzip; },{"./utils/common":82,"./utils/strings":83,"./zlib/deflate":87,"./zlib/messages":92,"./zlib/zstream":94}],81:[function(_dereq_,module,exports){ 'use strict'; var zlib_inflate = _dereq_('./zlib/inflate'); var utils = _dereq_('./utils/common'); var strings = _dereq_('./utils/strings'); var c = _dereq_('./zlib/constants'); var msg = _dereq_('./zlib/messages'); var ZStream = _dereq_('./zlib/zstream'); var GZheader = _dereq_('./zlib/gzheader'); var toString = Object.prototype.toString; /** * class Inflate * * Generic JS-style wrapper for zlib calls. If you don't need * streaming behaviour - use more simple functions: [[inflate]] * and [[inflateRaw]]. **/ /* internal * inflate.chunks -> Array * * Chunks of output data, if [[Inflate#onData]] not overridden. 
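 * (As with Deflate.chunks above, these are joined into [[Inflate.result]] by
 * the default [[Inflate#onEnd]] handler.)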
**/ /** * Inflate.result -> Uint8Array|Array|String * * Uncompressed result, generated by default [[Inflate#onData]] * and [[Inflate#onEnd]] handlers. Filled after you push last chunk * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you * push a chunk with explicit flush (call [[Inflate#push]] with * `Z_SYNC_FLUSH` param). **/ /** * Inflate.err -> Number * * Error code after inflate finished. 0 (Z_OK) on success. * Should be checked if broken data possible. **/ /** * Inflate.msg -> String * * Error message, if [[Inflate.err]] != 0 **/ /** * new Inflate(options) * - options (Object): zlib inflate options. * * Creates new inflator instance with specified params. Throws exception * on bad params. Supported options: * * - `windowBits` * - `dictionary` * * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) * for more information on these. * * Additional options, for internal needs: * * - `chunkSize` - size of generated data chunks (16K by default) * - `raw` (Boolean) - do raw inflate * - `to` (String) - if equal to 'string', then result will be converted * from utf8 to utf16 (javascript) string. When string output requested, * chunk length can differ from `chunkSize`, depending on content. * * By default, when no options set, autodetect deflate/gzip data format via * wrapper header. * * ##### Example: * * ```javascript * var pako = require('pako') * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); * * var inflate = new pako.Inflate({ level: 3}); * * inflate.push(chunk1, false); * inflate.push(chunk2, true); // true -> last chunk * * if (inflate.err) { throw new Error(inflate.err); } * * console.log(inflate.result); * ``` **/ function Inflate(options) { if (!(this instanceof Inflate)) return new Inflate(options); this.options = utils.assign({ chunkSize: 16384, windowBits: 0, to: '' }, options || {}); var opt = this.options; // Force window size for `raw` data, if not set directly, // because we have no header for autodetect. if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { opt.windowBits = -opt.windowBits; if (opt.windowBits === 0) { opt.windowBits = -15; } } // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate if ((opt.windowBits >= 0) && (opt.windowBits < 16) && !(options && options.windowBits)) { opt.windowBits += 32; } // Gzip header has no info about windows size, we can do autodetect only // for deflate. So, if window size not set, force it to max when gzip possible if ((opt.windowBits > 15) && (opt.windowBits < 48)) { // bit 3 (16) -> gzipped data // bit 4 (32) -> autodetect gzip/deflate if ((opt.windowBits & 15) === 0) { opt.windowBits |= 15; } } this.err = 0; // error code, if happens (0 = Z_OK) this.msg = ''; // error message this.ended = false; // used to avoid multiple onEnd() calls this.chunks = []; // chunks of compressed data this.strm = new ZStream(); this.strm.avail_out = 0; var status = zlib_inflate.inflateInit2( this.strm, opt.windowBits ); if (status !== c.Z_OK) { throw new Error(msg[status]); } this.header = new GZheader(); zlib_inflate.inflateGetHeader(this.strm, this.header); } /** * Inflate#push(data[, mode]) -> Boolean * - data (Uint8Array|Array|ArrayBuffer|String): input data * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. * * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with * new output chunks. 
Returns `true` on success. The last data block must have * mode Z_FINISH (or `true`). That will flush internal pending buffers and call * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you * can use mode Z_SYNC_FLUSH, keeping the decompression context. * * On fail call [[Inflate#onEnd]] with error code and return false. * * We strongly recommend to use `Uint8Array` on input for best speed (output * format is detected automatically). Also, don't skip last param and always * use the same type in your code (boolean or number). That will improve JS speed. * * For regular `Array`-s make sure all elements are [0..255]. * * ##### Example * * ```javascript * push(chunk, false); // push one of data chunks * ... * push(chunk, true); // push last chunk * ``` **/ Inflate.prototype.push = function (data, mode) { var strm = this.strm; var chunkSize = this.options.chunkSize; var dictionary = this.options.dictionary; var status, _mode; var next_out_utf8, tail, utf8str; var dict; // Flag to properly process Z_BUF_ERROR on testing inflate call // when we check that all output data was flushed. var allowBufError = false; if (this.ended) { return false; } _mode = (mode === ~~mode) ? mode : ((mode === true) ? c.Z_FINISH : c.Z_NO_FLUSH); // Convert data if needed if (typeof data === 'string') { // Only binary strings can be decompressed on practice strm.input = strings.binstring2buf(data); } else if (toString.call(data) === '[object ArrayBuffer]') { strm.input = new Uint8Array(data); } else { strm.input = data; } strm.next_in = 0; strm.avail_in = strm.input.length; do { if (strm.avail_out === 0) { strm.output = new utils.Buf8(chunkSize); strm.next_out = 0; strm.avail_out = chunkSize; } status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ if (status === c.Z_NEED_DICT && dictionary) { // Convert data if needed if (typeof dictionary === 'string') { dict = strings.string2buf(dictionary); } else if (toString.call(dictionary) === '[object ArrayBuffer]') { dict = new Uint8Array(dictionary); } else { dict = dictionary; } status = zlib_inflate.inflateSetDictionary(this.strm, dict); } if (status === c.Z_BUF_ERROR && allowBufError === true) { status = c.Z_OK; allowBufError = false; } if (status !== c.Z_STREAM_END && status !== c.Z_OK) { this.onEnd(status); this.ended = true; return false; } if (strm.next_out) { if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { if (this.options.to === 'string') { next_out_utf8 = strings.utf8border(strm.output, strm.next_out); tail = strm.next_out - next_out_utf8; utf8str = strings.buf2string(strm.output, next_out_utf8); // move tail strm.next_out = tail; strm.avail_out = chunkSize - tail; if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } this.onData(utf8str); } else { this.onData(utils.shrinkBuf(strm.output, strm.next_out)); } } } // When no more input data, we should check that internal inflate buffers // are flushed. The only way to do it when avail_out = 0 - run one more // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. // Here we set flag to process this error properly. // // NOTE. Deflate does not return error in this case and does not needs such // logic. 
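// In other words: once the caller's input is exhausted and the current output
// chunk is exactly full, the loop runs one more inflate() pass purely to drain
// internal buffers; if that pass produces nothing it reports Z_BUF_ERROR, which
// the allowBufError check earlier in this loop then downgrades back to Z_OK.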
if (strm.avail_in === 0 && strm.avail_out === 0) { allowBufError = true; } } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); if (status === c.Z_STREAM_END) { _mode = c.Z_FINISH; } // Finalize on the last chunk. if (_mode === c.Z_FINISH) { status = zlib_inflate.inflateEnd(this.strm); this.onEnd(status); this.ended = true; return status === c.Z_OK; } // callback interim results if Z_SYNC_FLUSH. if (_mode === c.Z_SYNC_FLUSH) { this.onEnd(c.Z_OK); strm.avail_out = 0; return true; } return true; }; /** * Inflate#onData(chunk) -> Void * - chunk (Uint8Array|Array|String): output data. Type of array depends * on js engine support. When string output requested, each chunk * will be string. * * By default, stores data blocks in `chunks[]` property and glue * those in `onEnd`. Override this handler, if you need another behaviour. **/ Inflate.prototype.onData = function (chunk) { this.chunks.push(chunk); }; /** * Inflate#onEnd(status) -> Void * - status (Number): inflate status. 0 (Z_OK) on success, * other if not. * * Called either after you tell inflate that the input stream is * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) * or if an error happened. By default - join collected chunks, * free memory and fill `results` / `err` properties. **/ Inflate.prototype.onEnd = function (status) { // On success - join if (status === c.Z_OK) { if (this.options.to === 'string') { // Glue & convert here, until we teach pako to send // utf8 aligned strings to onData this.result = this.chunks.join(''); } else { this.result = utils.flattenChunks(this.chunks); } } this.chunks = []; this.err = status; this.msg = this.strm.msg; }; /** * inflate(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to decompress. * - options (Object): zlib inflate options. * * Decompress `data` with inflate/ungzip and `options`. Autodetect * format via wrapper header by default. That's why we don't provide * separate `ungzip` method. * * Supported options are: * * - windowBits * * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) * for more information. * * Sugar (options): * * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify * negative windowBits implicitly. * - `to` (String) - if equal to 'string', then result will be converted * from utf8 to utf16 (javascript) string. When string output requested, * chunk length can differ from `chunkSize`, depending on content. * * * ##### Example: * * ```javascript * var pako = require('pako') * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) * , output; * * try { * output = pako.inflate(input); * } catch (err) * console.log(err); * } * ``` **/ function inflate(input, options) { var inflator = new Inflate(options); inflator.push(input, true); // That will never happens, if you don't cheat with options :) if (inflator.err) { throw inflator.msg || msg[inflator.err]; } return inflator.result; } /** * inflateRaw(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to decompress. * - options (Object): zlib inflate options. * * The same as [[inflate]], but creates raw data, without wrapper * (header and adler32 crc). **/ function inflateRaw(input, options) { options = options || {}; options.raw = true; return inflate(input, options); } /** * ungzip(data[, options]) -> Uint8Array|Array|String * - data (Uint8Array|Array|String): input data to decompress. * - options (Object): zlib inflate options. 
* * Just shortcut to [[inflate]], because it autodetects format * by header.content. Done for convenience. **/ exports.Inflate = Inflate; exports.inflate = inflate; exports.inflateRaw = inflateRaw; exports.ungzip = inflate; },{"./utils/common":82,"./utils/strings":83,"./zlib/constants":85,"./zlib/gzheader":88,"./zlib/inflate":90,"./zlib/messages":92,"./zlib/zstream":94}],82:[function(_dereq_,module,exports){ 'use strict'; var TYPED_OK = (typeof Uint8Array !== 'undefined') && (typeof Uint16Array !== 'undefined') && (typeof Int32Array !== 'undefined'); function _has(obj, key) { return Object.prototype.hasOwnProperty.call(obj, key); } exports.assign = function (obj /*from1, from2, from3, ...*/) { var sources = Array.prototype.slice.call(arguments, 1); while (sources.length) { var source = sources.shift(); if (!source) { continue; } if (typeof source !== 'object') { throw new TypeError(source + 'must be non-object'); } for (var p in source) { if (_has(source, p)) { obj[p] = source[p]; } } } return obj; }; // reduce buffer size, avoiding mem copy exports.shrinkBuf = function (buf, size) { if (buf.length === size) { return buf; } if (buf.subarray) { return buf.subarray(0, size); } buf.length = size; return buf; }; var fnTyped = { arraySet: function (dest, src, src_offs, len, dest_offs) { if (src.subarray && dest.subarray) { dest.set(src.subarray(src_offs, src_offs + len), dest_offs); return; } // Fallback to ordinary array for (var i = 0; i < len; i++) { dest[dest_offs + i] = src[src_offs + i]; } }, // Join array of chunks to single array. flattenChunks: function (chunks) { var i, l, len, pos, chunk, result; // calculate data length len = 0; for (i = 0, l = chunks.length; i < l; i++) { len += chunks[i].length; } // join chunks result = new Uint8Array(len); pos = 0; for (i = 0, l = chunks.length; i < l; i++) { chunk = chunks[i]; result.set(chunk, pos); pos += chunk.length; } return result; } }; var fnUntyped = { arraySet: function (dest, src, src_offs, len, dest_offs) { for (var i = 0; i < len; i++) { dest[dest_offs + i] = src[src_offs + i]; } }, // Join array of chunks to single array. flattenChunks: function (chunks) { return [].concat.apply([], chunks); } }; // Enable/Disable typed arrays use, for testing // exports.setTyped = function (on) { if (on) { exports.Buf8 = Uint8Array; exports.Buf16 = Uint16Array; exports.Buf32 = Int32Array; exports.assign(exports, fnTyped); } else { exports.Buf8 = Array; exports.Buf16 = Array; exports.Buf32 = Array; exports.assign(exports, fnUntyped); } }; exports.setTyped(TYPED_OK); },{}],83:[function(_dereq_,module,exports){ // String encode/decode helpers 'use strict'; var utils = _dereq_('./common'); // Quick check if we can use fast array to bin string conversion // // - apply(Array) can fail on Android 2.2 // - apply(Uint8Array) can fail on iOS 5.1 Safari // var STR_APPLY_OK = true; var STR_APPLY_UIA_OK = true; try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; } try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; } // Table with utf8 lengths (calculated by first byte of sequence) // Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, // because max possible codepoint is 0x10ffff var _utf8len = new utils.Buf8(256); for (var q = 0; q < 256; q++) { _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 
2 : 1); } _utf8len[254] = _utf8len[254] = 1; // Invalid sequence start // convert string to array (typed, when possible) exports.string2buf = function (str) { var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; // count binary size for (m_pos = 0; m_pos < str_len; m_pos++) { c = str.charCodeAt(m_pos); if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { c2 = str.charCodeAt(m_pos + 1); if ((c2 & 0xfc00) === 0xdc00) { c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); m_pos++; } } buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4; } // allocate buffer buf = new utils.Buf8(buf_len); // convert for (i = 0, m_pos = 0; i < buf_len; m_pos++) { c = str.charCodeAt(m_pos); if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { c2 = str.charCodeAt(m_pos + 1); if ((c2 & 0xfc00) === 0xdc00) { c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); m_pos++; } } if (c < 0x80) { /* one byte */ buf[i++] = c; } else if (c < 0x800) { /* two bytes */ buf[i++] = 0xC0 | (c >>> 6); buf[i++] = 0x80 | (c & 0x3f); } else if (c < 0x10000) { /* three bytes */ buf[i++] = 0xE0 | (c >>> 12); buf[i++] = 0x80 | (c >>> 6 & 0x3f); buf[i++] = 0x80 | (c & 0x3f); } else { /* four bytes */ buf[i++] = 0xf0 | (c >>> 18); buf[i++] = 0x80 | (c >>> 12 & 0x3f); buf[i++] = 0x80 | (c >>> 6 & 0x3f); buf[i++] = 0x80 | (c & 0x3f); } } return buf; }; // Helper (used in 2 places) function buf2binstring(buf, len) { // use fallback for big arrays to avoid stack overflow if (len < 65537) { if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); } } var result = ''; for (var i = 0; i < len; i++) { result += String.fromCharCode(buf[i]); } return result; } // Convert byte array to binary string exports.buf2binstring = function (buf) { return buf2binstring(buf, buf.length); }; // Convert binary string (typed, when possible) exports.binstring2buf = function (str) { var buf = new utils.Buf8(str.length); for (var i = 0, len = buf.length; i < len; i++) { buf[i] = str.charCodeAt(i); } return buf; }; // convert array to string exports.buf2string = function (buf, max) { var i, out, c, c_len; var len = max || buf.length; // Reserve max possible length (2 words per char) // NB: by unknown reasons, Array is significantly faster for // String.fromCharCode.apply than Uint16Array. var utf16buf = new Array(len * 2); for (out = 0, i = 0; i < len;) { c = buf[i++]; // quick process ascii if (c < 0x80) { utf16buf[out++] = c; continue; } c_len = _utf8len[c]; // skip 5 & 6 byte codes if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } // apply mask on first byte c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; // join the rest while (c_len > 1 && i < len) { c = (c << 6) | (buf[i++] & 0x3f); c_len--; } // terminated by end of string? if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } if (c < 0x10000) { utf16buf[out++] = c; } else { c -= 0x10000; utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); utf16buf[out++] = 0xdc00 | (c & 0x3ff); } } return buf2binstring(utf16buf, out); }; // Calculate max possible position in utf8 buffer, // that will not break sequence. If that's not possible // - (very small limits) return max size as is. 
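//
// (Inflate#push above relies on this when `to: 'string'` output is requested:
// only the bytes up to the border are decoded with buf2string, and the
// incomplete tail is moved back to the start of the output buffer for the
// next pass.)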
// // buf[] - utf8 bytes array // max - length limit (mandatory); exports.utf8border = function (buf, max) { var pos; max = max || buf.length; if (max > buf.length) { max = buf.length; } // go back from last position, until start of sequence found pos = max - 1; while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } // Very small and broken sequence, // return max, because we should return something anyway. if (pos < 0) { return max; } // If we came to start of buffer - that means buffer is too small, // return max too. if (pos === 0) { return max; } return (pos + _utf8len[buf[pos]] > max) ? pos : max; }; },{"./common":82}],84:[function(_dereq_,module,exports){ 'use strict'; // Note: adler32 takes 12% for level 0 and 2% for level 6. // It isn't worth it to make additional optimizations as in original. // Small size is preferable. // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. function adler32(adler, buf, len, pos) { var s1 = (adler & 0xffff) |0, s2 = ((adler >>> 16) & 0xffff) |0, n = 0; while (len !== 0) { // Set limit ~ twice less than 5552, to keep // s2 in 31-bits, because we force signed ints. // in other case %= will fail. n = len > 2000 ? 2000 : len; len -= n; do { s1 = (s1 + buf[pos++]) |0; s2 = (s2 + s1) |0; } while (--n); s1 %= 65521; s2 %= 65521; } return (s1 | (s2 << 16)) |0; } module.exports = adler32; },{}],85:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. module.exports = { /* Allowed flush values; see deflate() and inflate() below for details */ Z_NO_FLUSH: 0, Z_PARTIAL_FLUSH: 1, Z_SYNC_FLUSH: 2, Z_FULL_FLUSH: 3, Z_FINISH: 4, Z_BLOCK: 5, Z_TREES: 6, /* Return codes for the compression/decompression functions. 
Negative values * are errors, positive values are used for special but normal events. */ Z_OK: 0, Z_STREAM_END: 1, Z_NEED_DICT: 2, Z_ERRNO: -1, Z_STREAM_ERROR: -2, Z_DATA_ERROR: -3, //Z_MEM_ERROR: -4, Z_BUF_ERROR: -5, //Z_VERSION_ERROR: -6, /* compression levels */ Z_NO_COMPRESSION: 0, Z_BEST_SPEED: 1, Z_BEST_COMPRESSION: 9, Z_DEFAULT_COMPRESSION: -1, Z_FILTERED: 1, Z_HUFFMAN_ONLY: 2, Z_RLE: 3, Z_FIXED: 4, Z_DEFAULT_STRATEGY: 0, /* Possible values of the data_type field (though see inflate()) */ Z_BINARY: 0, Z_TEXT: 1, //Z_ASCII: 1, // = Z_TEXT (deprecated) Z_UNKNOWN: 2, /* The deflate compression method */ Z_DEFLATED: 8 //Z_NULL: null // Use -1 or null inline, depending on var type }; },{}],86:[function(_dereq_,module,exports){ 'use strict'; // Note: we can't get significant speed boost here. // So write code to minimize size - no pregenerated tables // and array tools dependencies. // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. // Use ordinary array, since untyped makes no boost here function makeTable() { var c, table = []; for (var n = 0; n < 256; n++) { c = n; for (var k = 0; k < 8; k++) { c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); } table[n] = c; } return table; } // Create table on load. Just 255 signed longs. Not a problem. var crcTable = makeTable(); function crc32(crc, buf, len, pos) { var t = crcTable, end = pos + len; crc ^= -1; for (var i = pos; i < end; i++) { crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; } return (crc ^ (-1)); // >>> 0; } module.exports = crc32; },{}],87:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. 
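// What follows is the low-level deflate engine (a port of zlib's deflate.c).
// The Deflate wrapper earlier in this bundle drives it through roughly this
// sequence (sketch only; see the wrapper for the real option handling):
//
//   zlib_deflate.deflateInit2(strm, level, method, windowBits, memLevel, strategy);
//   zlib_deflate.deflate(strm, flushMode);  // repeated while input/output remain
//   zlib_deflate.deflateEnd(strm);          // once the final Z_FINISH chunk is pushed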
var utils = _dereq_('../utils/common'); var trees = _dereq_('./trees'); var adler32 = _dereq_('./adler32'); var crc32 = _dereq_('./crc32'); var msg = _dereq_('./messages'); /* Public constants ==========================================================*/ /* ===========================================================================*/ /* Allowed flush values; see deflate() and inflate() below for details */ var Z_NO_FLUSH = 0; var Z_PARTIAL_FLUSH = 1; //var Z_SYNC_FLUSH = 2; var Z_FULL_FLUSH = 3; var Z_FINISH = 4; var Z_BLOCK = 5; //var Z_TREES = 6; /* Return codes for the compression/decompression functions. Negative values * are errors, positive values are used for special but normal events. */ var Z_OK = 0; var Z_STREAM_END = 1; //var Z_NEED_DICT = 2; //var Z_ERRNO = -1; var Z_STREAM_ERROR = -2; var Z_DATA_ERROR = -3; //var Z_MEM_ERROR = -4; var Z_BUF_ERROR = -5; //var Z_VERSION_ERROR = -6; /* compression levels */ //var Z_NO_COMPRESSION = 0; //var Z_BEST_SPEED = 1; //var Z_BEST_COMPRESSION = 9; var Z_DEFAULT_COMPRESSION = -1; var Z_FILTERED = 1; var Z_HUFFMAN_ONLY = 2; var Z_RLE = 3; var Z_FIXED = 4; var Z_DEFAULT_STRATEGY = 0; /* Possible values of the data_type field (though see inflate()) */ //var Z_BINARY = 0; //var Z_TEXT = 1; //var Z_ASCII = 1; // = Z_TEXT var Z_UNKNOWN = 2; /* The deflate compression method */ var Z_DEFLATED = 8; /*============================================================================*/ var MAX_MEM_LEVEL = 9; /* Maximum value for memLevel in deflateInit2 */ var MAX_WBITS = 15; /* 32K LZ77 window */ var DEF_MEM_LEVEL = 8; var LENGTH_CODES = 29; /* number of length codes, not counting the special END_BLOCK code */ var LITERALS = 256; /* number of literal bytes 0..255 */ var L_CODES = LITERALS + 1 + LENGTH_CODES; /* number of Literal or Length codes, including the END_BLOCK code */ var D_CODES = 30; /* number of distance codes */ var BL_CODES = 19; /* number of codes used to transfer the bit lengths */ var HEAP_SIZE = 2 * L_CODES + 1; /* maximum heap size */ var MAX_BITS = 15; /* All codes must not exceed MAX_BITS bits */ var MIN_MATCH = 3; var MAX_MATCH = 258; var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); var PRESET_DICT = 0x20; var INIT_STATE = 42; var EXTRA_STATE = 69; var NAME_STATE = 73; var COMMENT_STATE = 91; var HCRC_STATE = 103; var BUSY_STATE = 113; var FINISH_STATE = 666; var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ var BS_BLOCK_DONE = 2; /* block flush performed */ var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. function err(strm, errorCode) { strm.msg = msg[errorCode]; return errorCode; } function rank(f) { return ((f) << 1) - ((f) > 4 ? 9 : 0); } function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } /* ========================================================================= * Flush as much pending output as possible. All deflate() output goes * through this function so some applications may wish to modify it * to avoid allocating a large strm->output buffer and copying into it. * (See also read_buf()). 
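 *
 * In this port the copy is a utils.arraySet from s.pending_buf into
 * strm.output, after which pending, pending_out, next_out, avail_out and
 * total_out are adjusted by the number of bytes actually flushed.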
*/ function flush_pending(strm) { var s = strm.state; //_tr_flush_bits(s); var len = s.pending; if (len > strm.avail_out) { len = strm.avail_out; } if (len === 0) { return; } utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); strm.next_out += len; s.pending_out += len; strm.total_out += len; strm.avail_out -= len; s.pending -= len; if (s.pending === 0) { s.pending_out = 0; } } function flush_block_only(s, last) { trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); s.block_start = s.strstart; flush_pending(s.strm); } function put_byte(s, b) { s.pending_buf[s.pending++] = b; } /* ========================================================================= * Put a short in the pending buffer. The 16-bit value is put in MSB order. * IN assertion: the stream state is correct and there is enough room in * pending_buf. */ function putShortMSB(s, b) { // put_byte(s, (Byte)(b >> 8)); // put_byte(s, (Byte)(b & 0xff)); s.pending_buf[s.pending++] = (b >>> 8) & 0xff; s.pending_buf[s.pending++] = b & 0xff; } /* =========================================================================== * Read a new buffer from the current input stream, update the adler32 * and total number of bytes read. All deflate() input goes through * this function so some applications may wish to modify it to avoid * allocating a large strm->input buffer and copying from it. * (See also flush_pending()). */ function read_buf(strm, buf, start, size) { var len = strm.avail_in; if (len > size) { len = size; } if (len === 0) { return 0; } strm.avail_in -= len; // zmemcpy(buf, strm->next_in, len); utils.arraySet(buf, strm.input, strm.next_in, len, start); if (strm.state.wrap === 1) { strm.adler = adler32(strm.adler, buf, len, start); } else if (strm.state.wrap === 2) { strm.adler = crc32(strm.adler, buf, len, start); } strm.next_in += len; strm.total_in += len; return len; } /* =========================================================================== * Set match_start to the longest match starting at the given string and * return its length. Matches shorter or equal to prev_length are discarded, * in which case the result is equal to prev_length and match_start is * garbage. * IN assertions: cur_match is the head of the hash chain for the current * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 * OUT assertion: the match length is not greater than s->lookahead. */ function longest_match(s, cur_match) { var chain_length = s.max_chain_length; /* max hash chain length */ var scan = s.strstart; /* current string */ var match; /* matched string */ var len; /* length of current match */ var best_len = s.prev_length; /* best match length so far */ var nice_match = s.nice_match; /* stop if match long enough */ var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; var _win = s.window; // shortcut var wmask = s.w_mask; var prev = s.prev; /* Stop when cur_match becomes <= limit. To simplify the code, * we prevent matches with the string of window index 0. */ var strend = s.strstart + MAX_MATCH; var scan_end1 = _win[scan + best_len - 1]; var scan_end = _win[scan + best_len]; /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. * It is easy to get rid of this optimization if necessary. 
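 *
 * scan_end1/scan_end above cache the window bytes at offsets best_len-1 and
 * best_len from strstart, so candidates from the hash chain that cannot beat
 * the current best length are rejected with a couple of compares before the
 * unrolled byte-by-byte loop below is entered.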
*/ // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); /* Do not waste too much time if we already have a good match: */ if (s.prev_length >= s.good_match) { chain_length >>= 2; } /* Do not look for matches beyond the end of the input. This is necessary * to make deflate deterministic. */ if (nice_match > s.lookahead) { nice_match = s.lookahead; } // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); do { // Assert(cur_match < s->strstart, "no future"); match = cur_match; /* Skip to next match if the match length cannot increase * or if the match length is less than 2. Note that the checks below * for insufficient lookahead only occur occasionally for performance * reasons. Therefore uninitialized memory will be accessed, and * conditional jumps will be made that depend on those values. * However the length of the match is limited to the lookahead, so * the output of deflate is not affected by the uninitialized values. */ if (_win[match + best_len] !== scan_end || _win[match + best_len - 1] !== scan_end1 || _win[match] !== _win[scan] || _win[++match] !== _win[scan + 1]) { continue; } /* The check at best_len-1 can be removed because it will be made * again later. (This heuristic is not always a win.) * It is not necessary to compare scan[2] and match[2] since they * are always equal when the other bytes match, given that * the hash keys are equal and that HASH_BITS >= 8. */ scan += 2; match++; // Assert(*scan == *match, "match[2]?"); /* We check for insufficient lookahead only every 8th comparison; * the 256th check will be made at strstart+258. */ do { /*jshint noempty:false*/ } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && scan < strend); // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); len = MAX_MATCH - (strend - scan); scan = strend - MAX_MATCH; if (len > best_len) { s.match_start = cur_match; best_len = len; if (len >= nice_match) { break; } scan_end1 = _win[scan + best_len - 1]; scan_end = _win[scan + best_len]; } } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); if (best_len <= s.lookahead) { return best_len; } return s.lookahead; } /* =========================================================================== * Fill the window when the lookahead becomes insufficient. * Updates strstart and lookahead. * * IN assertion: lookahead < MIN_LOOKAHEAD * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD * At least one byte has been read, or avail_in == 0; reads are * performed for at least two bytes (required for the zip translate_eol * option -- not supported here). 
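 *
 * When strstart reaches the upper half of the 2*wSize window, the upper half
 * is copied down over the lower half and the head/prev hash chains are rebased
 * by wSize (entries that would go negative are reset to 0/NIL), so recorded
 * match distances remain valid after the slide.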
*/ function fill_window(s) { var _w_size = s.w_size; var p, n, m, more, str; //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); do { more = s.window_size - s.lookahead - s.strstart; // JS ints have 32 bit, block below not needed /* Deal with !@#$% 64K limit: */ //if (sizeof(int) <= 2) { // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { // more = wsize; // // } else if (more == (unsigned)(-1)) { // /* Very unlikely, but possible on 16 bit machine if // * strstart == 0 && lookahead == 1 (input done a byte at time) // */ // more--; // } //} /* If the window is almost full and there is insufficient lookahead, * move the upper half to the lower one to make room in the upper half. */ if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { utils.arraySet(s.window, s.window, _w_size, _w_size, 0); s.match_start -= _w_size; s.strstart -= _w_size; /* we now have strstart >= MAX_DIST */ s.block_start -= _w_size; /* Slide the hash table (could be avoided with 32 bit values at the expense of memory usage). We slide even when level == 0 to keep the hash table consistent if we switch back to level > 0 later. (Using level 0 permanently is not an optimal usage of zlib, so we don't care about this pathological case.) */ n = s.hash_size; p = n; do { m = s.head[--p]; s.head[p] = (m >= _w_size ? m - _w_size : 0); } while (--n); n = _w_size; p = n; do { m = s.prev[--p]; s.prev[p] = (m >= _w_size ? m - _w_size : 0); /* If n is not on any hash chain, prev[n] is garbage but * its value will never be used. */ } while (--n); more += _w_size; } if (s.strm.avail_in === 0) { break; } /* If there was no sliding: * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && * more == window_size - lookahead - strstart * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) * => more >= window_size - 2*WSIZE + 2 * In the BIG_MEM or MMAP case (not yet supported), * window_size == input_size + MIN_LOOKAHEAD && * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. * Otherwise, window_size == 2*WSIZE so more >= 2. * If there was sliding, more >= WSIZE. So in all cases, more >= 2. */ //Assert(more >= 2, "more < 2"); n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); s.lookahead += n; /* Initialize the hash value now that we have some input: */ if (s.lookahead + s.insert >= MIN_MATCH) { str = s.strstart - s.insert; s.ins_h = s.window[str]; /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; //#if MIN_MATCH != 3 // Call update_hash() MIN_MATCH-3 more times //#endif while (s.insert) { /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; s.prev[str & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = str; str++; s.insert--; if (s.lookahead + s.insert < MIN_MATCH) { break; } } } /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, * but this is not important since only literal bytes will be emitted. */ } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); /* If the WIN_INIT bytes after the end of the current data have never been * written, then zero those bytes in order to avoid memory check reports of * the use of uninitialized (or uninitialised as Julian writes) bytes by * the longest match routines. Update the high water mark for the next * time through here. 
WIN_INIT is set to MAX_MATCH since the longest match * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. */ // if (s.high_water < s.window_size) { // var curr = s.strstart + s.lookahead; // var init = 0; // // if (s.high_water < curr) { // /* Previous high water mark below current data -- zero WIN_INIT // * bytes or up to end of window, whichever is less. // */ // init = s.window_size - curr; // if (init > WIN_INIT) // init = WIN_INIT; // zmemzero(s->window + curr, (unsigned)init); // s->high_water = curr + init; // } // else if (s->high_water < (ulg)curr + WIN_INIT) { // /* High water mark at or above current data, but below current data // * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up // * to end of window, whichever is less. // */ // init = (ulg)curr + WIN_INIT - s->high_water; // if (init > s->window_size - s->high_water) // init = s->window_size - s->high_water; // zmemzero(s->window + s->high_water, (unsigned)init); // s->high_water += init; // } // } // // Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, // "not enough room for search"); } /* =========================================================================== * Copy without compression as much as possible from the input stream, return * the current block state. * This function does not insert new strings in the dictionary since * uncompressible data is probably not useful. This function is used * only for the level=0 compression option. * NOTE: this function should be optimized to avoid extra copying from * window to pending_buf. */ function deflate_stored(s, flush) { /* Stored blocks are limited to 0xffff bytes, pending_buf is limited * to pending_buf_size, and each stored block has a 5 byte header: */ var max_block_size = 0xffff; if (max_block_size > s.pending_buf_size - 5) { max_block_size = s.pending_buf_size - 5; } /* Copy as much as possible from input to output: */ for (;;) { /* Fill the window as much as possible: */ if (s.lookahead <= 1) { //Assert(s->strstart < s->w_size+MAX_DIST(s) || // s->block_start >= (long)s->w_size, "slide too late"); // if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || // s.block_start >= s.w_size)) { // throw new Error("slide too late"); // } fill_window(s); if (s.lookahead === 0 && flush === Z_NO_FLUSH) { return BS_NEED_MORE; } if (s.lookahead === 0) { break; } /* flush the current block */ } //Assert(s->block_start >= 0L, "block gone"); // if (s.block_start < 0) throw new Error("block gone"); s.strstart += s.lookahead; s.lookahead = 0; /* Emit a stored block if pending_buf will be full: */ var max_start = s.block_start + max_block_size; if (s.strstart === 0 || s.strstart >= max_start) { /* strstart == 0 is possible when wraparound on 16-bit machine */ s.lookahead = s.strstart - max_start; s.strstart = max_start; /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } /* Flush if we may have to slide, otherwise block_start may become * negative and the data will be gone: */ if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } } s.insert = 0; if (flush === Z_FINISH) { /*** FLUSH_BLOCK(s, 1); ***/ flush_block_only(s, true); if (s.strm.avail_out === 0) { return BS_FINISH_STARTED; } /***/ return BS_FINISH_DONE; } if (s.strstart > s.block_start) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return 
BS_NEED_MORE; } /***/ } return BS_NEED_MORE; } /* =========================================================================== * Compress as much as possible from the input stream, return the current * block state. * This function does not perform lazy evaluation of matches and inserts * new strings in the dictionary only for unmatched strings or for short * matches. It is used only for the fast compression options. */ function deflate_fast(s, flush) { var hash_head; /* head of the hash chain */ var bflush; /* set if current block must be flushed */ for (;;) { /* Make sure that we always have enough lookahead, except * at the end of the input file. We need MAX_MATCH bytes * for the next match, plus MIN_MATCH bytes to insert the * string following the next match. */ if (s.lookahead < MIN_LOOKAHEAD) { fill_window(s); if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { return BS_NEED_MORE; } if (s.lookahead === 0) { break; /* flush the current block */ } } /* Insert the string window[strstart .. strstart+2] in the * dictionary, and set hash_head to the head of the hash chain: */ hash_head = 0/*NIL*/; if (s.lookahead >= MIN_MATCH) { /*** INSERT_STRING(s, s.strstart, hash_head); ***/ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = s.strstart; /***/ } /* Find the longest match, discarding those <= prev_length. * At this point we have always match_length < MIN_MATCH */ if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { /* To simplify the code, we prevent matches with the string * of window index 0 (in particular we have to avoid a match * of the string with itself at the start of the input file). */ s.match_length = longest_match(s, hash_head); /* longest_match() sets match_start */ } if (s.match_length >= MIN_MATCH) { // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only /*** _tr_tally_dist(s, s.strstart - s.match_start, s.match_length - MIN_MATCH, bflush); ***/ bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); s.lookahead -= s.match_length; /* Insert new strings in the hash table only if the match length * is not too large. This saves time but degrades compression. */ if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { s.match_length--; /* string at strstart already in table */ do { s.strstart++; /*** INSERT_STRING(s, s.strstart, hash_head); ***/ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = s.strstart; /***/ /* strstart never exceeds WSIZE-MAX_MATCH, so there are * always MIN_MATCH bytes ahead. */ } while (--s.match_length !== 0); s.strstart++; } else { s.strstart += s.match_length; s.match_length = 0; s.ins_h = s.window[s.strstart]; /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; //#if MIN_MATCH != 3 // Call UPDATE_HASH() MIN_MATCH-3 more times //#endif /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not * matter since it will be recomputed at next deflate call. 
*/ } } else { /* No match, output a literal byte */ //Tracevv((stderr,"%c", s.window[s.strstart])); /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ bflush = trees._tr_tally(s, 0, s.window[s.strstart]); s.lookahead--; s.strstart++; } if (bflush) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } } s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); if (flush === Z_FINISH) { /*** FLUSH_BLOCK(s, 1); ***/ flush_block_only(s, true); if (s.strm.avail_out === 0) { return BS_FINISH_STARTED; } /***/ return BS_FINISH_DONE; } if (s.last_lit) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } return BS_BLOCK_DONE; } /* =========================================================================== * Same as above, but achieves better compression. We use a lazy * evaluation for matches: a match is finally adopted only if there is * no better match at the next window position. */ function deflate_slow(s, flush) { var hash_head; /* head of hash chain */ var bflush; /* set if current block must be flushed */ var max_insert; /* Process the input block. */ for (;;) { /* Make sure that we always have enough lookahead, except * at the end of the input file. We need MAX_MATCH bytes * for the next match, plus MIN_MATCH bytes to insert the * string following the next match. */ if (s.lookahead < MIN_LOOKAHEAD) { fill_window(s); if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { return BS_NEED_MORE; } if (s.lookahead === 0) { break; } /* flush the current block */ } /* Insert the string window[strstart .. strstart+2] in the * dictionary, and set hash_head to the head of the hash chain: */ hash_head = 0/*NIL*/; if (s.lookahead >= MIN_MATCH) { /*** INSERT_STRING(s, s.strstart, hash_head); ***/ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = s.strstart; /***/ } /* Find the longest match, discarding those <= prev_length. */ s.prev_length = s.match_length; s.prev_match = s.match_start; s.match_length = MIN_MATCH - 1; if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { /* To simplify the code, we prevent matches with the string * of window index 0 (in particular we have to avoid a match * of the string with itself at the start of the input file). */ s.match_length = longest_match(s, hash_head); /* longest_match() sets match_start */ if (s.match_length <= 5 && (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { /* If prev_match is also MIN_MATCH, match_start is garbage * but we will ignore the current match anyway. */ s.match_length = MIN_MATCH - 1; } } /* If there was a match at the previous step and the current * match is not better, output the previous match: */ if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { max_insert = s.strstart + s.lookahead - MIN_MATCH; /* Do not insert strings in hash table beyond this. */ //check_match(s, s.strstart-1, s.prev_match, s.prev_length); /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH, bflush);***/ bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); /* Insert in hash table all strings up to the end of the match. * strstart-1 and strstart are already inserted. 
If there is not * enough lookahead, the last two strings are not inserted in * the hash table. */ s.lookahead -= s.prev_length - 1; s.prev_length -= 2; do { if (++s.strstart <= max_insert) { /*** INSERT_STRING(s, s.strstart, hash_head); ***/ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = s.strstart; /***/ } } while (--s.prev_length !== 0); s.match_available = 0; s.match_length = MIN_MATCH - 1; s.strstart++; if (bflush) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } } else if (s.match_available) { /* If there was no match at the previous position, output a * single literal. If there was a match but the current match * is longer, truncate the previous match to a single literal. */ //Tracevv((stderr,"%c", s->window[s->strstart-1])); /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); if (bflush) { /*** FLUSH_BLOCK_ONLY(s, 0) ***/ flush_block_only(s, false); /***/ } s.strstart++; s.lookahead--; if (s.strm.avail_out === 0) { return BS_NEED_MORE; } } else { /* There is no previous match to compare with, wait for * the next step to decide. */ s.match_available = 1; s.strstart++; s.lookahead--; } } //Assert (flush != Z_NO_FLUSH, "no flush?"); if (s.match_available) { //Tracevv((stderr,"%c", s->window[s->strstart-1])); /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); s.match_available = 0; } s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; if (flush === Z_FINISH) { /*** FLUSH_BLOCK(s, 1); ***/ flush_block_only(s, true); if (s.strm.avail_out === 0) { return BS_FINISH_STARTED; } /***/ return BS_FINISH_DONE; } if (s.last_lit) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } return BS_BLOCK_DONE; } /* =========================================================================== * For Z_RLE, simply look for runs of bytes, generate matches only of distance * one. Do not maintain a hash table. (It will be regenerated if this run of * deflate switches away from Z_RLE.) */ function deflate_rle(s, flush) { var bflush; /* set if current block must be flushed */ var prev; /* byte at distance one to match */ var scan, strend; /* scan goes up to strend for length of run */ var _win = s.window; for (;;) { /* Make sure that we always have enough lookahead, except * at the end of the input file. We need MAX_MATCH bytes * for the longest run, plus one for the unrolled loop. 
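 *
 * (A run is only emitted as a distance-one match once it reaches MIN_MATCH
 * bytes; the three quick compares below weed out shorter repeats before the
 * unrolled loop measures the full run length, capped at MAX_MATCH and at the
 * available lookahead.)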
*/ if (s.lookahead <= MAX_MATCH) { fill_window(s); if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { return BS_NEED_MORE; } if (s.lookahead === 0) { break; } /* flush the current block */ } /* See how many times the previous byte repeats */ s.match_length = 0; if (s.lookahead >= MIN_MATCH && s.strstart > 0) { scan = s.strstart - 1; prev = _win[scan]; if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { strend = s.strstart + MAX_MATCH; do { /*jshint noempty:false*/ } while (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan] && scan < strend); s.match_length = MAX_MATCH - (strend - scan); if (s.match_length > s.lookahead) { s.match_length = s.lookahead; } } //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); } /* Emit match if have run of MIN_MATCH or longer, else emit literal */ if (s.match_length >= MIN_MATCH) { //check_match(s, s.strstart, s.strstart - 1, s.match_length); /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); s.lookahead -= s.match_length; s.strstart += s.match_length; s.match_length = 0; } else { /* No match, output a literal byte */ //Tracevv((stderr,"%c", s->window[s->strstart])); /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ bflush = trees._tr_tally(s, 0, s.window[s.strstart]); s.lookahead--; s.strstart++; } if (bflush) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } } s.insert = 0; if (flush === Z_FINISH) { /*** FLUSH_BLOCK(s, 1); ***/ flush_block_only(s, true); if (s.strm.avail_out === 0) { return BS_FINISH_STARTED; } /***/ return BS_FINISH_DONE; } if (s.last_lit) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } return BS_BLOCK_DONE; } /* =========================================================================== * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. * (It will be regenerated if this run of deflate switches away from Huffman.) */ function deflate_huff(s, flush) { var bflush; /* set if current block must be flushed */ for (;;) { /* Make sure that we have a literal to write. */ if (s.lookahead === 0) { fill_window(s); if (s.lookahead === 0) { if (flush === Z_NO_FLUSH) { return BS_NEED_MORE; } break; /* flush the current block */ } } /* Output a literal byte */ s.match_length = 0; //Tracevv((stderr,"%c", s->window[s->strstart])); /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ bflush = trees._tr_tally(s, 0, s.window[s.strstart]); s.lookahead--; s.strstart++; if (bflush) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } } s.insert = 0; if (flush === Z_FINISH) { /*** FLUSH_BLOCK(s, 1); ***/ flush_block_only(s, true); if (s.strm.avail_out === 0) { return BS_FINISH_STARTED; } /***/ return BS_FINISH_DONE; } if (s.last_lit) { /*** FLUSH_BLOCK(s, 0); ***/ flush_block_only(s, false); if (s.strm.avail_out === 0) { return BS_NEED_MORE; } /***/ } return BS_BLOCK_DONE; } /* Values for max_lazy_match, good_match and max_chain_length, depending on * the desired pack level (0..9). The values given below have been tuned to * exclude worst case performance for pathological files. Better values may be * found for specific files. 
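 *
 * The table maps levels to strategies: level 0 uses deflate_stored (store
 * only), levels 1-3 use deflate_fast (no lazy matching), and levels 4-9 use
 * deflate_slow with progressively larger good/lazy/nice/chain limits.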
*/ function Config(good_length, max_lazy, nice_length, max_chain, func) { this.good_length = good_length; this.max_lazy = max_lazy; this.nice_length = nice_length; this.max_chain = max_chain; this.func = func; } var configuration_table; configuration_table = [ /* good lazy nice chain */ new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ new Config(4, 5, 16, 8, deflate_fast), /* 2 */ new Config(4, 6, 32, 32, deflate_fast), /* 3 */ new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ new Config(8, 16, 32, 32, deflate_slow), /* 5 */ new Config(8, 16, 128, 128, deflate_slow), /* 6 */ new Config(8, 32, 128, 256, deflate_slow), /* 7 */ new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ ]; /* =========================================================================== * Initialize the "longest match" routines for a new zlib stream */ function lm_init(s) { s.window_size = 2 * s.w_size; /*** CLEAR_HASH(s); ***/ zero(s.head); // Fill with NIL (= 0); /* Set the default configuration parameters: */ s.max_lazy_match = configuration_table[s.level].max_lazy; s.good_match = configuration_table[s.level].good_length; s.nice_match = configuration_table[s.level].nice_length; s.max_chain_length = configuration_table[s.level].max_chain; s.strstart = 0; s.block_start = 0; s.lookahead = 0; s.insert = 0; s.match_length = s.prev_length = MIN_MATCH - 1; s.match_available = 0; s.ins_h = 0; } function DeflateState() { this.strm = null; /* pointer back to this zlib stream */ this.status = 0; /* as the name implies */ this.pending_buf = null; /* output still pending */ this.pending_buf_size = 0; /* size of pending_buf */ this.pending_out = 0; /* next pending byte to output to the stream */ this.pending = 0; /* nb of bytes in the pending buffer */ this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ this.gzhead = null; /* gzip header information to write */ this.gzindex = 0; /* where in extra, name, or comment */ this.method = Z_DEFLATED; /* can only be DEFLATED */ this.last_flush = -1; /* value of flush param for previous deflate call */ this.w_size = 0; /* LZ77 window size (32K by default) */ this.w_bits = 0; /* log2(w_size) (8..16) */ this.w_mask = 0; /* w_size - 1 */ this.window = null; /* Sliding window. Input bytes are read into the second half of the window, * and move to the first half later to keep a dictionary of at least wSize * bytes. With this organization, matches are limited to a distance of * wSize-MAX_MATCH bytes, but this ensures that IO is always * performed with a length multiple of the block size. */ this.window_size = 0; /* Actual size of window: 2*wSize, except when the user input buffer * is directly used as sliding window. */ this.prev = null; /* Link to older string with same hash index. To limit the size of this * array to 64K, this link is maintained only for the last 32K strings. * An index in this array is thus a window index modulo 32K. */ this.head = null; /* Heads of the hash chains or NIL. */ this.ins_h = 0; /* hash index of string to be inserted */ this.hash_size = 0; /* number of elements in hash table */ this.hash_bits = 0; /* log2(hash_size) */ this.hash_mask = 0; /* hash_size-1 */ this.hash_shift = 0; /* Number of bits by which ins_h must be shifted at each input * step. 
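// --- Editor's sketch (illustration only): the rolling hash update used by
// INSERT_STRING above and in deflateSetDictionary below. The constants mirror
// deflateInit2 further down (hash_bits = memLevel + 7, hash_shift derived the
// same way); MIN_MATCH is zlib's 3. Because hash_shift * MIN_MATCH >= hash_bits,
// bytes older than the last MIN_MATCH are shifted past hash_mask, so hashing a
// 3-byte string from scratch gives the same value the incrementally maintained
// s.ins_h holds at that position.
function exampleHashOfTriple(bytes, hashBits) {
  var MIN_MATCH = 3;
  var hashMask = (1 << hashBits) - 1;
  var hashShift = ~~((hashBits + MIN_MATCH - 1) / MIN_MATCH);
  var h = 0;
  for (var i = 0; i < MIN_MATCH; i++) {
    h = ((h << hashShift) ^ bytes[i]) & hashMask;  // same form as UPDATE_HASH / INSERT_STRING
  }
  return h;
}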
It must be such that after MIN_MATCH steps, the oldest * byte no longer takes part in the hash key, that is: * hash_shift * MIN_MATCH >= hash_bits */ this.block_start = 0; /* Window position at the beginning of the current output block. Gets * negative when the window is moved backwards. */ this.match_length = 0; /* length of best match */ this.prev_match = 0; /* previous match */ this.match_available = 0; /* set if previous match exists */ this.strstart = 0; /* start of string to insert */ this.match_start = 0; /* start of matching string */ this.lookahead = 0; /* number of valid bytes ahead in window */ this.prev_length = 0; /* Length of the best match at previous step. Matches not greater than this * are discarded. This is used in the lazy match evaluation. */ this.max_chain_length = 0; /* To speed up deflation, hash chains are never searched beyond this * length. A higher limit improves compression ratio but degrades the * speed. */ this.max_lazy_match = 0; /* Attempt to find a better match only when the current match is strictly * smaller than this value. This mechanism is used only for compression * levels >= 4. */ // That's alias to max_lazy_match, don't use directly //this.max_insert_length = 0; /* Insert new strings in the hash table only if the match length is not * greater than this length. This saves time but degrades compression. * max_insert_length is used only for compression levels <= 3. */ this.level = 0; /* compression level (1..9) */ this.strategy = 0; /* favor or force Huffman coding*/ this.good_match = 0; /* Use a faster search when the previous match is longer than this */ this.nice_match = 0; /* Stop searching when current match exceeds this */ /* used by trees.c: */ /* Didn't use ct_data typedef below to suppress compiler warning */ // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ // Use flat array of DOUBLE size, with interleaved fata, // because JS does not support effective this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); zero(this.dyn_ltree); zero(this.dyn_dtree); zero(this.bl_tree); this.l_desc = null; /* desc. for literal tree */ this.d_desc = null; /* desc. for distance tree */ this.bl_desc = null; /* desc. for bit length tree */ //ush bl_count[MAX_BITS+1]; this.bl_count = new utils.Buf16(MAX_BITS + 1); /* number of codes at each bit length for an optimal tree */ //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ zero(this.heap); this.heap_len = 0; /* number of elements in the heap */ this.heap_max = 0; /* element of largest frequency */ /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. * The same heap array is used to build all trees. */ this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; zero(this.depth); /* Depth of each subtree used as tie breaker for trees of equal frequency */ this.l_buf = 0; /* buffer index for literals or lengths */ this.lit_bufsize = 0; /* Size of match buffer for literals/lengths. 
There are 4 reasons for * limiting lit_bufsize to 64K: * - frequencies can be kept in 16 bit counters * - if compression is not successful for the first block, all input * data is still in the window so we can still emit a stored block even * when input comes from standard input. (This can also be done for * all blocks if lit_bufsize is not greater than 32K.) * - if compression is not successful for a file smaller than 64K, we can * even emit a stored file instead of a stored block (saving 5 bytes). * This is applicable only for zip (not gzip or zlib). * - creating new Huffman trees less frequently may not provide fast * adaptation to changes in the input data statistics. (Take for * example a binary file with poorly compressible code followed by * a highly compressible string table.) Smaller buffer sizes give * fast adaptation but have of course the overhead of transmitting * trees more frequently. * - I can't count above 4 */ this.last_lit = 0; /* running index in l_buf */ this.d_buf = 0; /* Buffer index for distances. To simplify the code, d_buf and l_buf have * the same number of elements. To use different lengths, an extra flag * array would be necessary. */ this.opt_len = 0; /* bit length of current block with optimal trees */ this.static_len = 0; /* bit length of current block with static trees */ this.matches = 0; /* number of string matches in current block */ this.insert = 0; /* bytes at end of window left to insert */ this.bi_buf = 0; /* Output buffer. bits are inserted starting at the bottom (least * significant bits). */ this.bi_valid = 0; /* Number of valid bits in bi_buf. All bits above the last valid bit * are always zero. */ // Used for window memory init. We safely ignore it for JS. That makes // sense only for pointers and memory check tools. //this.high_water = 0; /* High water mark offset in window for initialized bytes -- bytes above * this are set to zero in order to avoid memory check warnings when * longest match routines access bytes past the input. This is then * updated to the new high water mark. */ } function deflateResetKeep(strm) { var s; if (!strm || !strm.state) { return err(strm, Z_STREAM_ERROR); } strm.total_in = strm.total_out = 0; strm.data_type = Z_UNKNOWN; s = strm.state; s.pending = 0; s.pending_out = 0; if (s.wrap < 0) { s.wrap = -s.wrap; /* was made negative by deflate(..., Z_FINISH); */ } s.status = (s.wrap ? INIT_STATE : BUSY_STATE); strm.adler = (s.wrap === 2) ? 
0 // crc32(0, Z_NULL, 0) : 1; // adler32(0, Z_NULL, 0) s.last_flush = Z_NO_FLUSH; trees._tr_init(s); return Z_OK; } function deflateReset(strm) { var ret = deflateResetKeep(strm); if (ret === Z_OK) { lm_init(strm.state); } return ret; } function deflateSetHeader(strm, head) { if (!strm || !strm.state) { return Z_STREAM_ERROR; } if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } strm.state.gzhead = head; return Z_OK; } function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { if (!strm) { // === Z_NULL return Z_STREAM_ERROR; } var wrap = 1; if (level === Z_DEFAULT_COMPRESSION) { level = 6; } if (windowBits < 0) { /* suppress zlib wrapper */ wrap = 0; windowBits = -windowBits; } else if (windowBits > 15) { wrap = 2; /* write gzip wrapper instead */ windowBits -= 16; } if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || strategy < 0 || strategy > Z_FIXED) { return err(strm, Z_STREAM_ERROR); } if (windowBits === 8) { windowBits = 9; } /* until 256-byte window bug fixed */ var s = new DeflateState(); strm.state = s; s.strm = strm; s.wrap = wrap; s.gzhead = null; s.w_bits = windowBits; s.w_size = 1 << s.w_bits; s.w_mask = s.w_size - 1; s.hash_bits = memLevel + 7; s.hash_size = 1 << s.hash_bits; s.hash_mask = s.hash_size - 1; s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); s.window = new utils.Buf8(s.w_size * 2); s.head = new utils.Buf16(s.hash_size); s.prev = new utils.Buf16(s.w_size); // Don't need mem init magic for JS. //s.high_water = 0; /* nothing written to s->window yet */ s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ s.pending_buf_size = s.lit_bufsize * 4; //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); //s->pending_buf = (uchf *) overlay; s.pending_buf = new utils.Buf8(s.pending_buf_size); // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); s.d_buf = 1 * s.lit_bufsize; //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; s.l_buf = (1 + 2) * s.lit_bufsize; s.level = level; s.strategy = strategy; s.method = method; return deflateReset(strm); } function deflateInit(strm, level) { return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); } function deflate(strm, flush) { var old_flush, s; var beg, val; // for gzip header write only if (!strm || !strm.state || flush > Z_BLOCK || flush < 0) { return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; } s = strm.state; if (!strm.output || (!strm.input && strm.avail_in !== 0) || (s.status === FINISH_STATE && flush !== Z_FINISH)) { return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); } s.strm = strm; /* just in case */ old_flush = s.last_flush; s.last_flush = flush; /* Write the header */ if (s.status === INIT_STATE) { if (s.wrap === 2) { // GZIP header strm.adler = 0; //crc32(0L, Z_NULL, 0); put_byte(s, 31); put_byte(s, 139); put_byte(s, 8); if (!s.gzhead) { // s->gzhead == Z_NULL put_byte(s, 0); put_byte(s, 0); put_byte(s, 0); put_byte(s, 0); put_byte(s, 0); put_byte(s, s.level === 9 ? 2 : (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 4 : 0)); put_byte(s, OS_CODE); s.status = BUSY_STATE; } else { put_byte(s, (s.gzhead.text ? 1 : 0) + (s.gzhead.hcrc ? 2 : 0) + (!s.gzhead.extra ? 0 : 4) + (!s.gzhead.name ? 0 : 8) + (!s.gzhead.comment ? 
0 : 16) ); put_byte(s, s.gzhead.time & 0xff); put_byte(s, (s.gzhead.time >> 8) & 0xff); put_byte(s, (s.gzhead.time >> 16) & 0xff); put_byte(s, (s.gzhead.time >> 24) & 0xff); put_byte(s, s.level === 9 ? 2 : (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 4 : 0)); put_byte(s, s.gzhead.os & 0xff); if (s.gzhead.extra && s.gzhead.extra.length) { put_byte(s, s.gzhead.extra.length & 0xff); put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); } if (s.gzhead.hcrc) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); } s.gzindex = 0; s.status = EXTRA_STATE; } } else // DEFLATE header { var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; var level_flags = -1; if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { level_flags = 0; } else if (s.level < 6) { level_flags = 1; } else if (s.level === 6) { level_flags = 2; } else { level_flags = 3; } header |= (level_flags << 6); if (s.strstart !== 0) { header |= PRESET_DICT; } header += 31 - (header % 31); s.status = BUSY_STATE; putShortMSB(s, header); /* Save the adler32 of the preset dictionary: */ if (s.strstart !== 0) { putShortMSB(s, strm.adler >>> 16); putShortMSB(s, strm.adler & 0xffff); } strm.adler = 1; // adler32(0L, Z_NULL, 0); } } //#ifdef GZIP if (s.status === EXTRA_STATE) { if (s.gzhead.extra/* != Z_NULL*/) { beg = s.pending; /* start of bytes to update crc */ while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { if (s.pending === s.pending_buf_size) { if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } flush_pending(strm); beg = s.pending; if (s.pending === s.pending_buf_size) { break; } } put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); s.gzindex++; } if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } if (s.gzindex === s.gzhead.extra.length) { s.gzindex = 0; s.status = NAME_STATE; } } else { s.status = NAME_STATE; } } if (s.status === NAME_STATE) { if (s.gzhead.name/* != Z_NULL*/) { beg = s.pending; /* start of bytes to update crc */ //int val; do { if (s.pending === s.pending_buf_size) { if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } flush_pending(strm); beg = s.pending; if (s.pending === s.pending_buf_size) { val = 1; break; } } // JS specific: little magic to add zero terminator to end of string if (s.gzindex < s.gzhead.name.length) { val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; } else { val = 0; } put_byte(s, val); } while (val !== 0); if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } if (val === 0) { s.gzindex = 0; s.status = COMMENT_STATE; } } else { s.status = COMMENT_STATE; } } if (s.status === COMMENT_STATE) { if (s.gzhead.comment/* != Z_NULL*/) { beg = s.pending; /* start of bytes to update crc */ //int val; do { if (s.pending === s.pending_buf_size) { if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } flush_pending(strm); beg = s.pending; if (s.pending === s.pending_buf_size) { val = 1; break; } } // JS specific: little magic to add zero terminator to end of string if (s.gzindex < s.gzhead.comment.length) { val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; } else { val = 0; } put_byte(s, val); } while (val !== 0); if (s.gzhead.hcrc && s.pending > beg) { strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); } if (val === 0) { s.status = HCRC_STATE; } } else { s.status = HCRC_STATE; } } if (s.status 
=== HCRC_STATE) { if (s.gzhead.hcrc) { if (s.pending + 2 > s.pending_buf_size) { flush_pending(strm); } if (s.pending + 2 <= s.pending_buf_size) { put_byte(s, strm.adler & 0xff); put_byte(s, (strm.adler >> 8) & 0xff); strm.adler = 0; //crc32(0L, Z_NULL, 0); s.status = BUSY_STATE; } } else { s.status = BUSY_STATE; } } //#endif /* Flush as much pending output as possible */ if (s.pending !== 0) { flush_pending(strm); if (strm.avail_out === 0) { /* Since avail_out is 0, deflate will be called again with * more output space, but possibly with both pending and * avail_in equal to zero. There won't be anything to do, * but this is not an error situation so make sure we * return OK instead of BUF_ERROR at next call of deflate: */ s.last_flush = -1; return Z_OK; } /* Make sure there is something to do and avoid duplicate consecutive * flushes. For repeated and useless calls with Z_FINISH, we keep * returning Z_STREAM_END instead of Z_BUF_ERROR. */ } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && flush !== Z_FINISH) { return err(strm, Z_BUF_ERROR); } /* User must not provide more input after the first FINISH: */ if (s.status === FINISH_STATE && strm.avail_in !== 0) { return err(strm, Z_BUF_ERROR); } /* Start a new block or continue the current one. */ if (strm.avail_in !== 0 || s.lookahead !== 0 || (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : (s.strategy === Z_RLE ? deflate_rle(s, flush) : configuration_table[s.level].func(s, flush)); if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { s.status = FINISH_STATE; } if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { if (strm.avail_out === 0) { s.last_flush = -1; /* avoid BUF_ERROR next call, see above */ } return Z_OK; /* If flush != Z_NO_FLUSH && avail_out == 0, the next call * of deflate should use the same flush parameter to make sure * that the flush is complete. So we don't have to output an * empty block here, this will be done at next call. This also * ensures that for a very small output buffer, we emit at most * one empty block. */ } if (bstate === BS_BLOCK_DONE) { if (flush === Z_PARTIAL_FLUSH) { trees._tr_align(s); } else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ trees._tr_stored_block(s, 0, 0, false); /* For a full flush, this empty block will be recognized * as a special marker by inflate_sync(). */ if (flush === Z_FULL_FLUSH) { /*** CLEAR_HASH(s); ***/ /* forget history */ zero(s.head); // Fill with NIL (= 0); if (s.lookahead === 0) { s.strstart = 0; s.block_start = 0; s.insert = 0; } } } flush_pending(strm); if (strm.avail_out === 0) { s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ return Z_OK; } } } //Assert(strm->avail_out > 0, "bug2"); //if (strm.avail_out <= 0) { throw new Error("bug2");} if (flush !== Z_FINISH) { return Z_OK; } if (s.wrap <= 0) { return Z_STREAM_END; } /* Write the trailer */ if (s.wrap === 2) { put_byte(s, strm.adler & 0xff); put_byte(s, (strm.adler >> 8) & 0xff); put_byte(s, (strm.adler >> 16) & 0xff); put_byte(s, (strm.adler >> 24) & 0xff); put_byte(s, strm.total_in & 0xff); put_byte(s, (strm.total_in >> 8) & 0xff); put_byte(s, (strm.total_in >> 16) & 0xff); put_byte(s, (strm.total_in >> 24) & 0xff); } else { putShortMSB(s, strm.adler >>> 16); putShortMSB(s, strm.adler & 0xffff); } flush_pending(strm); /* If avail_out is zero, the application will call deflate again * to flush the rest. */ if (s.wrap > 0) { s.wrap = -s.wrap; } /* write the trailer only once! 
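// --- Editor's sketch (illustration only, not part of the bundle): driving the
// functions this module exports for a one-shot compression. The stream object
// shape (input/next_in/avail_in/output/next_out/avail_out, ...) follows the
// fields deflate() reads above; the numeric constants are the standard zlib
// values (Z_DEFLATED = 8, Z_FINISH = 4), the 16384-byte output buffer is an
// arbitrary choice, and the helper name and parameter passing are hypothetical
// to keep the snippet self-contained.
function exampleDeflateOneShot(deflateInit2, deflate, deflateEnd, utils, data) {
  var strm = {
    input: data, next_in: 0, avail_in: data.length, total_in: 0,
    output: new utils.Buf8(16384), next_out: 0, avail_out: 16384, total_out: 0,
    msg: '', state: null, data_type: 2 /* Z_UNKNOWN; reset internally anyway */, adler: 0
  };
  deflateInit2(strm, 6, 8, 15, 8, 0);   // level 6, Z_DEFLATED, zlib wrapper, memLevel 8, default strategy
  var status = deflate(strm, 4);        // Z_FINISH; expects Z_STREAM_END when output fits in one buffer
  deflateEnd(strm);
  // compressed bytes live in strm.output[0 .. strm.next_out)
  return { data: strm.output, length: strm.next_out, status: status };
}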
*/ return s.pending !== 0 ? Z_OK : Z_STREAM_END; } function deflateEnd(strm) { var status; if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { return Z_STREAM_ERROR; } status = strm.state.status; if (status !== INIT_STATE && status !== EXTRA_STATE && status !== NAME_STATE && status !== COMMENT_STATE && status !== HCRC_STATE && status !== BUSY_STATE && status !== FINISH_STATE ) { return err(strm, Z_STREAM_ERROR); } strm.state = null; return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; } /* ========================================================================= * Initializes the compression dictionary from the given byte * sequence without producing any compressed output. */ function deflateSetDictionary(strm, dictionary) { var dictLength = dictionary.length; var s; var str, n; var wrap; var avail; var next; var input; var tmpDict; if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { return Z_STREAM_ERROR; } s = strm.state; wrap = s.wrap; if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { return Z_STREAM_ERROR; } /* when using zlib wrappers, compute Adler-32 for provided dictionary */ if (wrap === 1) { /* adler32(strm->adler, dictionary, dictLength); */ strm.adler = adler32(strm.adler, dictionary, dictLength, 0); } s.wrap = 0; /* avoid computing Adler-32 in read_buf */ /* if dictionary would fill window, just replace the history */ if (dictLength >= s.w_size) { if (wrap === 0) { /* already empty otherwise */ /*** CLEAR_HASH(s); ***/ zero(s.head); // Fill with NIL (= 0); s.strstart = 0; s.block_start = 0; s.insert = 0; } /* use the tail */ // dictionary = dictionary.slice(dictLength - s.w_size); tmpDict = new utils.Buf8(s.w_size); utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); dictionary = tmpDict; dictLength = s.w_size; } /* insert dictionary into window and hash */ avail = strm.avail_in; next = strm.next_in; input = strm.input; strm.avail_in = dictLength; strm.next_in = 0; strm.input = dictionary; fill_window(s); while (s.lookahead >= MIN_MATCH) { str = s.strstart; n = s.lookahead - (MIN_MATCH - 1); do { /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; s.prev[str & s.w_mask] = s.head[s.ins_h]; s.head[s.ins_h] = str; str++; } while (--n); s.strstart = str; s.lookahead = MIN_MATCH - 1; fill_window(s); } s.strstart += s.lookahead; s.block_start = s.strstart; s.insert = s.lookahead; s.lookahead = 0; s.match_length = s.prev_length = MIN_MATCH - 1; s.match_available = 0; strm.next_in = next; strm.input = input; strm.avail_in = avail; s.wrap = wrap; return Z_OK; } exports.deflateInit = deflateInit; exports.deflateInit2 = deflateInit2; exports.deflateReset = deflateReset; exports.deflateResetKeep = deflateResetKeep; exports.deflateSetHeader = deflateSetHeader; exports.deflate = deflate; exports.deflateEnd = deflateEnd; exports.deflateSetDictionary = deflateSetDictionary; exports.deflateInfo = 'pako deflate (from Nodeca project)'; /* Not implemented exports.deflateBound = deflateBound; exports.deflateCopy = deflateCopy; exports.deflateParams = deflateParams; exports.deflatePending = deflatePending; exports.deflatePrime = deflatePrime; exports.deflateTune = deflateTune; */ },{"../utils/common":82,"./adler32":84,"./crc32":86,"./messages":92,"./trees":93}],88:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 
'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. function GZheader() { /* true if compressed data believed to be text */ this.text = 0; /* modification time */ this.time = 0; /* extra flags (not used when writing a gzip file) */ this.xflags = 0; /* operating system */ this.os = 0; /* pointer to extra field or Z_NULL if none */ this.extra = null; /* extra field length (valid if extra != Z_NULL) */ this.extra_len = 0; // Actually, we don't need it in JS, // but leave for few code modifications // // Setup limits is not necessary because in js we should not preallocate memory // for inflate use constant limit in 65536 bytes // /* space at extra (only when reading header) */ // this.extra_max = 0; /* pointer to zero-terminated file name or Z_NULL */ this.name = ''; /* space at name (only when reading header) */ // this.name_max = 0; /* pointer to zero-terminated comment or Z_NULL */ this.comment = ''; /* space at comment (only when reading header) */ // this.comm_max = 0; /* true if there was or will be a header crc */ this.hcrc = 0; /* true when done reading gzip header (not used when writing a gzip file) */ this.done = false; } module.exports = GZheader; },{}],89:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. // See state defs from inflate.js var BAD = 30; /* got a data error -- remain here until reset */ var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ /* Decode literal, length, and distance codes and write out the resulting literal and match bytes until either not enough input or output is available, an end-of-block is encountered, or a data error is encountered. When large enough input and output buffers are supplied to inflate(), for example, a 16K input buffer and a 64K output buffer, more than 95% of the inflate execution time is spent in this routine. 
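// --- Editor's sketch (illustration only): the bit-accumulator refill that this
// routine and inflate() below rely on (the NEEDBITS/PULLBYTE/BITS macros noted
// in the comments). Input bytes are appended above the bits already held,
// least significant bits first; the 48-bit worst case quoted in the notes is
// why 6 bytes of available input always cover one length/distance pair. The
// state object and helper name here are hypothetical.
function exampleNeedBits(st, input, n) {
  // st = { next: index into input, hold: bit buffer, bits: number of bits held }
  while (st.bits < n) {
    st.hold += input[st.next++] << st.bits;   // same form as: hold += input[_in++] << bits
    st.bits += 8;
  }
  return st.hold & ((1 << n) - 1);            // peek the low n bits, like BITS(n)
}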
Entry assumptions: state.mode === LEN strm.avail_in >= 6 strm.avail_out >= 258 start >= strm.avail_out state.bits < 8 On return, state.mode is one of: LEN -- ran out of enough output space or enough available input TYPE -- reached end of block code, inflate() to interpret next block BAD -- error in block data Notes: - The maximum input bits used by a length/distance pair is 15 bits for the length code, 5 bits for the length extra, 15 bits for the distance code, and 13 bits for the distance extra. This totals 48 bits, or six bytes. Therefore if strm.avail_in >= 6, then there is enough input to avoid checking for available input while decoding. - The maximum bytes that a single length/distance pair can output is 258 bytes, which is the maximum length that can be coded. inflate_fast() requires strm.avail_out >= 258 for each loop to avoid checking for output space. */ module.exports = function inflate_fast(strm, start) { var state; var _in; /* local strm.input */ var last; /* have enough input while in < last */ var _out; /* local strm.output */ var beg; /* inflate()'s initial strm.output */ var end; /* while out < end, enough space available */ //#ifdef INFLATE_STRICT var dmax; /* maximum distance from zlib header */ //#endif var wsize; /* window size or zero if not using window */ var whave; /* valid bytes in the window */ var wnext; /* window write index */ // Use `s_window` instead `window`, avoid conflict with instrumentation tools var s_window; /* allocated sliding window, if wsize != 0 */ var hold; /* local strm.hold */ var bits; /* local strm.bits */ var lcode; /* local strm.lencode */ var dcode; /* local strm.distcode */ var lmask; /* mask for first level of length codes */ var dmask; /* mask for first level of distance codes */ var here; /* retrieved table entry */ var op; /* code bits, operation, extra bits, or */ /* window position, window bytes to copy */ var len; /* match length, unused bytes */ var dist; /* match distance */ var from; /* where to copy match from */ var from_source; var input, output; // JS specific, because we have no pointers /* copy state to local variables */ state = strm.state; //here = state.here; _in = strm.next_in; input = strm.input; last = _in + (strm.avail_in - 5); _out = strm.next_out; output = strm.output; beg = _out - (start - strm.avail_out); end = _out + (strm.avail_out - 257); //#ifdef INFLATE_STRICT dmax = state.dmax; //#endif wsize = state.wsize; whave = state.whave; wnext = state.wnext; s_window = state.window; hold = state.hold; bits = state.bits; lcode = state.lencode; dcode = state.distcode; lmask = (1 << state.lenbits) - 1; dmask = (1 << state.distbits) - 1; /* decode literals and length/distances until end-of-block or not enough input data or output space */ top: do { if (bits < 15) { hold += input[_in++] << bits; bits += 8; hold += input[_in++] << bits; bits += 8; } here = lcode[hold & lmask]; dolen: for (;;) { // Goto emulation op = here >>> 24/*here.bits*/; hold >>>= op; bits -= op; op = (here >>> 16) & 0xff/*here.op*/; if (op === 0) { /* literal */ //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
// "inflate: literal '%c'\n" : // "inflate: literal 0x%02x\n", here.val)); output[_out++] = here & 0xffff/*here.val*/; } else if (op & 16) { /* length base */ len = here & 0xffff/*here.val*/; op &= 15; /* number of extra bits */ if (op) { if (bits < op) { hold += input[_in++] << bits; bits += 8; } len += hold & ((1 << op) - 1); hold >>>= op; bits -= op; } //Tracevv((stderr, "inflate: length %u\n", len)); if (bits < 15) { hold += input[_in++] << bits; bits += 8; hold += input[_in++] << bits; bits += 8; } here = dcode[hold & dmask]; dodist: for (;;) { // goto emulation op = here >>> 24/*here.bits*/; hold >>>= op; bits -= op; op = (here >>> 16) & 0xff/*here.op*/; if (op & 16) { /* distance base */ dist = here & 0xffff/*here.val*/; op &= 15; /* number of extra bits */ if (bits < op) { hold += input[_in++] << bits; bits += 8; if (bits < op) { hold += input[_in++] << bits; bits += 8; } } dist += hold & ((1 << op) - 1); //#ifdef INFLATE_STRICT if (dist > dmax) { strm.msg = 'invalid distance too far back'; state.mode = BAD; break top; } //#endif hold >>>= op; bits -= op; //Tracevv((stderr, "inflate: distance %u\n", dist)); op = _out - beg; /* max distance in output */ if (dist > op) { /* see if copy from window */ op = dist - op; /* distance back in window */ if (op > whave) { if (state.sane) { strm.msg = 'invalid distance too far back'; state.mode = BAD; break top; } // (!) This block is disabled in zlib defaults, // don't enable it for binary compatibility //#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR // if (len <= op - whave) { // do { // output[_out++] = 0; // } while (--len); // continue top; // } // len -= op - whave; // do { // output[_out++] = 0; // } while (--op > whave); // if (op === 0) { // from = _out - dist; // do { // output[_out++] = output[from++]; // } while (--len); // continue top; // } //#endif } from = 0; // window index from_source = s_window; if (wnext === 0) { /* very common case */ from += wsize - op; if (op < len) { /* some from window */ len -= op; do { output[_out++] = s_window[from++]; } while (--op); from = _out - dist; /* rest from output */ from_source = output; } } else if (wnext < op) { /* wrap around window */ from += wsize + wnext - op; op -= wnext; if (op < len) { /* some from end of window */ len -= op; do { output[_out++] = s_window[from++]; } while (--op); from = 0; if (wnext < len) { /* some from start of window */ op = wnext; len -= op; do { output[_out++] = s_window[from++]; } while (--op); from = _out - dist; /* rest from output */ from_source = output; } } } else { /* contiguous in window */ from += wnext - op; if (op < len) { /* some from window */ len -= op; do { output[_out++] = s_window[from++]; } while (--op); from = _out - dist; /* rest from output */ from_source = output; } } while (len > 2) { output[_out++] = from_source[from++]; output[_out++] = from_source[from++]; output[_out++] = from_source[from++]; len -= 3; } if (len) { output[_out++] = from_source[from++]; if (len > 1) { output[_out++] = from_source[from++]; } } } else { from = _out - dist; /* copy direct from output */ do { /* minimum length is three */ output[_out++] = output[from++]; output[_out++] = output[from++]; output[_out++] = output[from++]; len -= 3; } while (len > 2); if (len) { output[_out++] = output[from++]; if (len > 1) { output[_out++] = output[from++]; } } } } else if ((op & 64) === 0) { /* 2nd level distance code */ here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; continue dodist; } else { strm.msg = 'invalid distance code'; state.mode = 
BAD; break top; } break; // need to emulate goto via "continue" } } else if ((op & 64) === 0) { /* 2nd level length code */ here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; continue dolen; } else if (op & 32) { /* end-of-block */ //Tracevv((stderr, "inflate: end of block\n")); state.mode = TYPE; break top; } else { strm.msg = 'invalid literal/length code'; state.mode = BAD; break top; } break; // need to emulate goto via "continue" } } while (_in < last && _out < end); /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ len = bits >> 3; _in -= len; bits -= len << 3; hold &= (1 << bits) - 1; /* update state and return */ strm.next_in = _in; strm.next_out = _out; strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); state.hold = hold; state.bits = bits; return; }; },{}],90:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. var utils = _dereq_('../utils/common'); var adler32 = _dereq_('./adler32'); var crc32 = _dereq_('./crc32'); var inflate_fast = _dereq_('./inffast'); var inflate_table = _dereq_('./inftrees'); var CODES = 0; var LENS = 1; var DISTS = 2; /* Public constants ==========================================================*/ /* ===========================================================================*/ /* Allowed flush values; see deflate() and inflate() below for details */ //var Z_NO_FLUSH = 0; //var Z_PARTIAL_FLUSH = 1; //var Z_SYNC_FLUSH = 2; //var Z_FULL_FLUSH = 3; var Z_FINISH = 4; var Z_BLOCK = 5; var Z_TREES = 6; /* Return codes for the compression/decompression functions. Negative values * are errors, positive values are used for special but normal events. 
*/ var Z_OK = 0; var Z_STREAM_END = 1; var Z_NEED_DICT = 2; //var Z_ERRNO = -1; var Z_STREAM_ERROR = -2; var Z_DATA_ERROR = -3; var Z_MEM_ERROR = -4; var Z_BUF_ERROR = -5; //var Z_VERSION_ERROR = -6; /* The deflate compression method */ var Z_DEFLATED = 8; /* STATES ====================================================================*/ /* ===========================================================================*/ var HEAD = 1; /* i: waiting for magic header */ var FLAGS = 2; /* i: waiting for method and flags (gzip) */ var TIME = 3; /* i: waiting for modification time (gzip) */ var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ var EXLEN = 5; /* i: waiting for extra length (gzip) */ var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ var NAME = 7; /* i: waiting for end of file name (gzip) */ var COMMENT = 8; /* i: waiting for end of comment (gzip) */ var HCRC = 9; /* i: waiting for header crc (gzip) */ var DICTID = 10; /* i: waiting for dictionary check value */ var DICT = 11; /* waiting for inflateSetDictionary() call */ var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ var STORED = 14; /* i: waiting for stored size (length and complement) */ var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ var COPY = 16; /* i/o: waiting for input or output to copy stored block */ var TABLE = 17; /* i: waiting for dynamic block table lengths */ var LENLENS = 18; /* i: waiting for code length code lengths */ var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ var LEN_ = 20; /* i: same as LEN below, but only first time in */ var LEN = 21; /* i: waiting for length/lit/eob code */ var LENEXT = 22; /* i: waiting for length extra bits */ var DIST = 23; /* i: waiting for distance code */ var DISTEXT = 24; /* i: waiting for distance extra bits */ var MATCH = 25; /* o: waiting for output space to copy string */ var LIT = 26; /* o: waiting for output space to write literal */ var CHECK = 27; /* i: waiting for 32-bit check value */ var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ var DONE = 29; /* finished check, done -- remain here until reset */ var BAD = 30; /* got a data error -- remain here until reset */ var MEM = 31; /* got an inflate() memory error -- remain here until reset */ var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ /* ===========================================================================*/ var ENOUGH_LENS = 852; var ENOUGH_DISTS = 592; //var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); var MAX_WBITS = 15; /* 32K LZ77 window */ var DEF_WBITS = MAX_WBITS; function zswap32(q) { return (((q >>> 24) & 0xff) + ((q >>> 8) & 0xff00) + ((q & 0xff00) << 8) + ((q & 0xff) << 24)); } function InflateState() { this.mode = 0; /* current inflate mode */ this.last = false; /* true if processing last block */ this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ this.havedict = false; /* true if dictionary provided */ this.flags = 0; /* gzip header method and flags (0 if zlib) */ this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ this.check = 0; /* protected copy of check value */ this.total = 0; /* protected copy of output count */ // TODO: may be {} this.head = null; /* where to save gzip header information */ /* sliding window */ this.wbits = 0; /* log base 2 of requested window size */ this.wsize = 0; /* window size or zero if not using window */ this.whave = 0; /* valid bytes in the window 
*/ this.wnext = 0; /* window write index */ this.window = null; /* allocated sliding window, if needed */ /* bit accumulator */ this.hold = 0; /* input bit accumulator */ this.bits = 0; /* number of bits in "in" */ /* for string and stored block copying */ this.length = 0; /* literal or length of data to copy */ this.offset = 0; /* distance back to copy string from */ /* for table and code decoding */ this.extra = 0; /* extra bits needed */ /* fixed and dynamic code tables */ this.lencode = null; /* starting table for length/literal codes */ this.distcode = null; /* starting table for distance codes */ this.lenbits = 0; /* index bits for lencode */ this.distbits = 0; /* index bits for distcode */ /* dynamic table building */ this.ncode = 0; /* number of code length code lengths */ this.nlen = 0; /* number of length code lengths */ this.ndist = 0; /* number of distance code lengths */ this.have = 0; /* number of code lengths in lens[] */ this.next = null; /* next available space in codes[] */ this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ this.work = new utils.Buf16(288); /* work area for code table building */ /* because we don't have pointers in js, we use lencode and distcode directly as buffers so we don't need codes */ //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ this.distdyn = null; /* dynamic table for distance codes (JS specific) */ this.sane = 0; /* if false, allow invalid distance too far */ this.back = 0; /* bits back of last unprocessed length/lit */ this.was = 0; /* initial length of match */ } function inflateResetKeep(strm) { var state; if (!strm || !strm.state) { return Z_STREAM_ERROR; } state = strm.state; strm.total_in = strm.total_out = state.total = 0; strm.msg = ''; /*Z_NULL*/ if (state.wrap) { /* to support ill-conceived Java test suite */ strm.adler = state.wrap & 1; } state.mode = HEAD; state.last = 0; state.havedict = 0; state.dmax = 32768; state.head = null/*Z_NULL*/; state.hold = 0; state.bits = 0; //state.lencode = state.distcode = state.next = state.codes; state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); state.sane = 1; state.back = -1; //Tracev((stderr, "inflate: reset\n")); return Z_OK; } function inflateReset(strm) { var state; if (!strm || !strm.state) { return Z_STREAM_ERROR; } state = strm.state; state.wsize = 0; state.whave = 0; state.wnext = 0; return inflateResetKeep(strm); } function inflateReset2(strm, windowBits) { var wrap; var state; /* get the state */ if (!strm || !strm.state) { return Z_STREAM_ERROR; } state = strm.state; /* extract wrap request from windowBits parameter */ if (windowBits < 0) { wrap = 0; windowBits = -windowBits; } else { wrap = (windowBits >> 4) + 1; if (windowBits < 48) { windowBits &= 15; } } /* set number of window bits, free window if different */ if (windowBits && (windowBits < 8 || windowBits > 15)) { return Z_STREAM_ERROR; } if (state.window !== null && state.wbits !== windowBits) { state.window = null; } /* update state and reset the rest of it */ state.wrap = wrap; state.wbits = windowBits; return inflateReset(strm); } function inflateInit2(strm, windowBits) { var ret; var state; if (!strm) { return Z_STREAM_ERROR; } //strm.msg = Z_NULL; /* in case we return an error */ state = new InflateState(); //if (state === Z_NULL) return Z_MEM_ERROR; //Tracev((stderr, "inflate: allocated\n")); strm.state = state; state.window 
= null/*Z_NULL*/; ret = inflateReset2(strm, windowBits); if (ret !== Z_OK) { strm.state = null/*Z_NULL*/; } return ret; } function inflateInit(strm) { return inflateInit2(strm, DEF_WBITS); } /* Return state with length and distance decoding tables and index sizes set to fixed code decoding. Normally this returns fixed tables from inffixed.h. If BUILDFIXED is defined, then instead this routine builds the tables the first time it's called, and returns those tables the first time and thereafter. This reduces the size of the code by about 2K bytes, in exchange for a little execution time. However, BUILDFIXED should not be used for threaded applications, since the rewriting of the tables and virgin may not be thread-safe. */ var virgin = true; var lenfix, distfix; // We have no pointers in JS, so keep tables separate function fixedtables(state) { /* build fixed huffman tables if first call (may not be thread safe) */ if (virgin) { var sym; lenfix = new utils.Buf32(512); distfix = new utils.Buf32(32); /* literal/length table */ sym = 0; while (sym < 144) { state.lens[sym++] = 8; } while (sym < 256) { state.lens[sym++] = 9; } while (sym < 280) { state.lens[sym++] = 7; } while (sym < 288) { state.lens[sym++] = 8; } inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); /* distance table */ sym = 0; while (sym < 32) { state.lens[sym++] = 5; } inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); /* do this just once */ virgin = false; } state.lencode = lenfix; state.lenbits = 9; state.distcode = distfix; state.distbits = 5; } /* Update the window with the last wsize (normally 32K) bytes written before returning. If window does not exist yet, create it. This is only called when a window is already in use, or when output has been written during this inflate call, but the end of the deflate stream has not been reached yet. It is also called to create a window for dictionary data when a dictionary is loaded. Providing output buffers larger than 32K to inflate() should provide a speed advantage, since only the last 32K of output is copied to the sliding window upon return from inflate(), and since all distances after the first 32K of output will fall in the output data, making match copies simpler and faster. The advantage may be dependent on the size of the processor's data caches. 
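// --- Editor's sketch (illustration only): the circular-buffer bookkeeping
// updatewindow() below performs, reduced to plain array loops. `win` is the
// sliding window of length wsize, state.wnext the write index, state.whave the
// count of valid bytes; src/end/copy mirror the real parameters (copy bytes of
// src ending at index end). Names are hypothetical.
function exampleUpdateWindow(win, state, src, end, copy) {
  var wsize = win.length, i, j, k;
  if (copy >= wsize) {
    // the new output covers a whole window: keep only its last wsize bytes
    for (i = 0; i < wsize; i++) { win[i] = src[end - wsize + i]; }
    state.wnext = 0;
    state.whave = wsize;
    return;
  }
  var dist = wsize - state.wnext;              // room left before wrapping
  if (dist > copy) { dist = copy; }
  for (j = 0; j < dist; j++) { win[state.wnext + j] = src[end - copy + j]; }
  copy -= dist;
  if (copy) {                                  // wrapped: continue at the start of the window
    for (k = 0; k < copy; k++) { win[k] = src[end - copy + k]; }
    state.wnext = copy;
    state.whave = wsize;
  } else {
    state.wnext += dist;
    if (state.wnext === wsize) { state.wnext = 0; }
    if (state.whave < wsize) { state.whave += dist; }
  }
}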
*/ function updatewindow(strm, src, end, copy) { var dist; var state = strm.state; /* if it hasn't been done already, allocate space for the window */ if (state.window === null) { state.wsize = 1 << state.wbits; state.wnext = 0; state.whave = 0; state.window = new utils.Buf8(state.wsize); } /* copy state->wsize or less output bytes into the circular window */ if (copy >= state.wsize) { utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); state.wnext = 0; state.whave = state.wsize; } else { dist = state.wsize - state.wnext; if (dist > copy) { dist = copy; } //zmemcpy(state->window + state->wnext, end - copy, dist); utils.arraySet(state.window, src, end - copy, dist, state.wnext); copy -= dist; if (copy) { //zmemcpy(state->window, end - copy, copy); utils.arraySet(state.window, src, end - copy, copy, 0); state.wnext = copy; state.whave = state.wsize; } else { state.wnext += dist; if (state.wnext === state.wsize) { state.wnext = 0; } if (state.whave < state.wsize) { state.whave += dist; } } } return 0; } function inflate(strm, flush) { var state; var input, output; // input/output buffers var next; /* next input INDEX */ var put; /* next output INDEX */ var have, left; /* available input and output */ var hold; /* bit buffer */ var bits; /* bits in bit buffer */ var _in, _out; /* save starting available input and output */ var copy; /* number of stored or match bytes to copy */ var from; /* where to copy match bytes from */ var from_source; var here = 0; /* current decoding table entry */ var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) //var last; /* parent table entry */ var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) var len; /* length to copy for repeats, bits to drop */ var ret; /* return code */ var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ var opts; var n; // temporary var for NEED_BITS var order = /* permutation of code lengths */ [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; if (!strm || !strm.state || !strm.output || (!strm.input && strm.avail_in !== 0)) { return Z_STREAM_ERROR; } state = strm.state; if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ //--- LOAD() --- put = strm.next_out; output = strm.output; left = strm.avail_out; next = strm.next_in; input = strm.input; have = strm.avail_in; hold = state.hold; bits = state.bits; //--- _in = have; _out = left; ret = Z_OK; inf_leave: // goto emulation for (;;) { switch (state.mode) { case HEAD: if (state.wrap === 0) { state.mode = TYPEDO; break; } //=== NEEDBITS(16); while (bits < 16) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ state.check = 0/*crc32(0L, Z_NULL, 0)*/; //=== CRC2(state.check, hold); hbuf[0] = hold & 0xff; hbuf[1] = (hold >>> 8) & 0xff; state.check = crc32(state.check, hbuf, 2, 0); //===// //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = FLAGS; break; } state.flags = 0; /* expect zlib header */ if (state.head) { state.head.done = false; } if (!(state.wrap & 1) || /* check if zlib header allowed */ (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { strm.msg = 'incorrect header check'; state.mode = BAD; break; } if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { strm.msg = 'unknown compression method'; state.mode = BAD; break; } //--- DROPBITS(4) ---// hold >>>= 4; bits -= 4; //---// len = (hold & 0x0f)/*BITS(4)*/ + 8; if (state.wbits === 0) { state.wbits 
= len; } else if (len > state.wbits) { strm.msg = 'invalid window size'; state.mode = BAD; break; } state.dmax = 1 << len; //Tracev((stderr, "inflate: zlib header ok\n")); strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; state.mode = hold & 0x200 ? DICTID : TYPE; //=== INITBITS(); hold = 0; bits = 0; //===// break; case FLAGS: //=== NEEDBITS(16); */ while (bits < 16) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.flags = hold; if ((state.flags & 0xff) !== Z_DEFLATED) { strm.msg = 'unknown compression method'; state.mode = BAD; break; } if (state.flags & 0xe000) { strm.msg = 'unknown header flags set'; state.mode = BAD; break; } if (state.head) { state.head.text = ((hold >> 8) & 1); } if (state.flags & 0x0200) { //=== CRC2(state.check, hold); hbuf[0] = hold & 0xff; hbuf[1] = (hold >>> 8) & 0xff; state.check = crc32(state.check, hbuf, 2, 0); //===// } //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = TIME; /* falls through */ case TIME: //=== NEEDBITS(32); */ while (bits < 32) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if (state.head) { state.head.time = hold; } if (state.flags & 0x0200) { //=== CRC4(state.check, hold) hbuf[0] = hold & 0xff; hbuf[1] = (hold >>> 8) & 0xff; hbuf[2] = (hold >>> 16) & 0xff; hbuf[3] = (hold >>> 24) & 0xff; state.check = crc32(state.check, hbuf, 4, 0); //=== } //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = OS; /* falls through */ case OS: //=== NEEDBITS(16); */ while (bits < 16) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if (state.head) { state.head.xflags = (hold & 0xff); state.head.os = (hold >> 8); } if (state.flags & 0x0200) { //=== CRC2(state.check, hold); hbuf[0] = hold & 0xff; hbuf[1] = (hold >>> 8) & 0xff; state.check = crc32(state.check, hbuf, 2, 0); //===// } //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = EXLEN; /* falls through */ case EXLEN: if (state.flags & 0x0400) { //=== NEEDBITS(16); */ while (bits < 16) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.length = hold; if (state.head) { state.head.extra_len = hold; } if (state.flags & 0x0200) { //=== CRC2(state.check, hold); hbuf[0] = hold & 0xff; hbuf[1] = (hold >>> 8) & 0xff; state.check = crc32(state.check, hbuf, 2, 0); //===// } //=== INITBITS(); hold = 0; bits = 0; //===// } else if (state.head) { state.head.extra = null/*Z_NULL*/; } state.mode = EXTRA; /* falls through */ case EXTRA: if (state.flags & 0x0400) { copy = state.length; if (copy > have) { copy = have; } if (copy) { if (state.head) { len = state.head.extra_len - state.length; if (!state.head.extra) { // Use untyped array for more convenient processing later state.head.extra = new Array(state.head.extra_len); } utils.arraySet( state.head.extra, input, next, // extra field is limited to 65536 bytes // - no need for additional size check copy, /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ len ); //zmemcpy(state.head.extra + len, next, // len + copy > state.head.extra_max ? 
// state.head.extra_max - len : copy); } if (state.flags & 0x0200) { state.check = crc32(state.check, input, copy, next); } have -= copy; next += copy; state.length -= copy; } if (state.length) { break inf_leave; } } state.length = 0; state.mode = NAME; /* falls through */ case NAME: if (state.flags & 0x0800) { if (have === 0) { break inf_leave; } copy = 0; do { // TODO: 2 or 1 bytes? len = input[next + copy++]; /* use constant limit because in js we should not preallocate memory */ if (state.head && len && (state.length < 65536 /*state.head.name_max*/)) { state.head.name += String.fromCharCode(len); } } while (len && copy < have); if (state.flags & 0x0200) { state.check = crc32(state.check, input, copy, next); } have -= copy; next += copy; if (len) { break inf_leave; } } else if (state.head) { state.head.name = null; } state.length = 0; state.mode = COMMENT; /* falls through */ case COMMENT: if (state.flags & 0x1000) { if (have === 0) { break inf_leave; } copy = 0; do { len = input[next + copy++]; /* use constant limit because in js we should not preallocate memory */ if (state.head && len && (state.length < 65536 /*state.head.comm_max*/)) { state.head.comment += String.fromCharCode(len); } } while (len && copy < have); if (state.flags & 0x0200) { state.check = crc32(state.check, input, copy, next); } have -= copy; next += copy; if (len) { break inf_leave; } } else if (state.head) { state.head.comment = null; } state.mode = HCRC; /* falls through */ case HCRC: if (state.flags & 0x0200) { //=== NEEDBITS(16); */ while (bits < 16) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if (hold !== (state.check & 0xffff)) { strm.msg = 'header crc mismatch'; state.mode = BAD; break; } //=== INITBITS(); hold = 0; bits = 0; //===// } if (state.head) { state.head.hcrc = ((state.flags >> 9) & 1); state.head.done = true; } strm.adler = state.check = 0; state.mode = TYPE; break; case DICTID: //=== NEEDBITS(32); */ while (bits < 32) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// strm.adler = state.check = zswap32(hold); //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = DICT; /* falls through */ case DICT: if (state.havedict === 0) { //--- RESTORE() --- strm.next_out = put; strm.avail_out = left; strm.next_in = next; strm.avail_in = have; state.hold = hold; state.bits = bits; //--- return Z_NEED_DICT; } strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; state.mode = TYPE; /* falls through */ case TYPE: if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } /* falls through */ case TYPEDO: if (state.last) { //--- BYTEBITS() ---// hold >>>= bits & 7; bits -= bits & 7; //---// state.mode = CHECK; break; } //=== NEEDBITS(3); */ while (bits < 3) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.last = (hold & 0x01)/*BITS(1)*/; //--- DROPBITS(1) ---// hold >>>= 1; bits -= 1; //---// switch ((hold & 0x03)/*BITS(2)*/) { case 0: /* stored block */ //Tracev((stderr, "inflate: stored block%s\n", // state.last ? " (last)" : "")); state.mode = STORED; break; case 1: /* fixed block */ fixedtables(state); //Tracev((stderr, "inflate: fixed codes block%s\n", // state.last ? " (last)" : "")); state.mode = LEN_; /* decode codes */ if (flush === Z_TREES) { //--- DROPBITS(2) ---// hold >>>= 2; bits -= 2; //---// break inf_leave; } break; case 2: /* dynamic block */ //Tracev((stderr, "inflate: dynamic codes block%s\n", // state.last ? 
" (last)" : "")); state.mode = TABLE; break; case 3: strm.msg = 'invalid block type'; state.mode = BAD; } //--- DROPBITS(2) ---// hold >>>= 2; bits -= 2; //---// break; case STORED: //--- BYTEBITS() ---// /* go to byte boundary */ hold >>>= bits & 7; bits -= bits & 7; //---// //=== NEEDBITS(32); */ while (bits < 32) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { strm.msg = 'invalid stored block lengths'; state.mode = BAD; break; } state.length = hold & 0xffff; //Tracev((stderr, "inflate: stored length %u\n", // state.length)); //=== INITBITS(); hold = 0; bits = 0; //===// state.mode = COPY_; if (flush === Z_TREES) { break inf_leave; } /* falls through */ case COPY_: state.mode = COPY; /* falls through */ case COPY: copy = state.length; if (copy) { if (copy > have) { copy = have; } if (copy > left) { copy = left; } if (copy === 0) { break inf_leave; } //--- zmemcpy(put, next, copy); --- utils.arraySet(output, input, next, copy, put); //---// have -= copy; next += copy; left -= copy; put += copy; state.length -= copy; break; } //Tracev((stderr, "inflate: stored end\n")); state.mode = TYPE; break; case TABLE: //=== NEEDBITS(14); */ while (bits < 14) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; //--- DROPBITS(5) ---// hold >>>= 5; bits -= 5; //---// state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; //--- DROPBITS(5) ---// hold >>>= 5; bits -= 5; //---// state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; //--- DROPBITS(4) ---// hold >>>= 4; bits -= 4; //---// //#ifndef PKZIP_BUG_WORKAROUND if (state.nlen > 286 || state.ndist > 30) { strm.msg = 'too many length or distance symbols'; state.mode = BAD; break; } //#endif //Tracev((stderr, "inflate: table sizes ok\n")); state.have = 0; state.mode = LENLENS; /* falls through */ case LENLENS: while (state.have < state.ncode) { //=== NEEDBITS(3); while (bits < 3) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); //--- DROPBITS(3) ---// hold >>>= 3; bits -= 3; //---// } while (state.have < 19) { state.lens[order[state.have++]] = 0; } // We have separate tables & no pointers. 2 commented lines below not needed. 
//state.next = state.codes; //state.lencode = state.next; // Switch to use dynamic table state.lencode = state.lendyn; state.lenbits = 7; opts = { bits: state.lenbits }; ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); state.lenbits = opts.bits; if (ret) { strm.msg = 'invalid code lengths set'; state.mode = BAD; break; } //Tracev((stderr, "inflate: code lengths ok\n")); state.have = 0; state.mode = CODELENS; /* falls through */ case CODELENS: while (state.have < state.nlen + state.ndist) { for (;;) { here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ here_bits = here >>> 24; here_op = (here >>> 16) & 0xff; here_val = here & 0xffff; if ((here_bits) <= bits) { break; } //--- PULLBYTE() ---// if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; //---// } if (here_val < 16) { //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// state.lens[state.have++] = here_val; } else { if (here_val === 16) { //=== NEEDBITS(here.bits + 2); n = here_bits + 2; while (bits < n) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// if (state.have === 0) { strm.msg = 'invalid bit length repeat'; state.mode = BAD; break; } len = state.lens[state.have - 1]; copy = 3 + (hold & 0x03);//BITS(2); //--- DROPBITS(2) ---// hold >>>= 2; bits -= 2; //---// } else if (here_val === 17) { //=== NEEDBITS(here.bits + 3); n = here_bits + 3; while (bits < n) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// len = 0; copy = 3 + (hold & 0x07);//BITS(3); //--- DROPBITS(3) ---// hold >>>= 3; bits -= 3; //---// } else { //=== NEEDBITS(here.bits + 7); n = here_bits + 7; while (bits < n) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// len = 0; copy = 11 + (hold & 0x7f);//BITS(7); //--- DROPBITS(7) ---// hold >>>= 7; bits -= 7; //---// } if (state.have + copy > state.nlen + state.ndist) { strm.msg = 'invalid bit length repeat'; state.mode = BAD; break; } while (copy--) { state.lens[state.have++] = len; } } } /* handle error breaks in while */ if (state.mode === BAD) { break; } /* check for end-of-block code (better have one) */ if (state.lens[256] === 0) { strm.msg = 'invalid code -- missing end-of-block'; state.mode = BAD; break; } /* build code tables -- note: do not change the lenbits or distbits values here (9 and 6) without reading the comments in inftrees.h concerning the ENOUGH constants, which depend on those values */ state.lenbits = 9; opts = { bits: state.lenbits }; ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); // We have separate tables & no pointers. 2 commented lines below not needed. // state.next_index = opts.table_index; state.lenbits = opts.bits; // state.lencode = state.next; if (ret) { strm.msg = 'invalid literal/lengths set'; state.mode = BAD; break; } state.distbits = 6; //state.distcode.copy(state.codes); // Switch to use dynamic table state.distcode = state.distdyn; opts = { bits: state.distbits }; ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); // We have separate tables & no pointers. 
2 commented lines below not needed. // state.next_index = opts.table_index; state.distbits = opts.bits; // state.distcode = state.next; if (ret) { strm.msg = 'invalid distances set'; state.mode = BAD; break; } //Tracev((stderr, 'inflate: codes ok\n')); state.mode = LEN_; if (flush === Z_TREES) { break inf_leave; } /* falls through */ case LEN_: state.mode = LEN; /* falls through */ case LEN: if (have >= 6 && left >= 258) { //--- RESTORE() --- strm.next_out = put; strm.avail_out = left; strm.next_in = next; strm.avail_in = have; state.hold = hold; state.bits = bits; //--- inflate_fast(strm, _out); //--- LOAD() --- put = strm.next_out; output = strm.output; left = strm.avail_out; next = strm.next_in; input = strm.input; have = strm.avail_in; hold = state.hold; bits = state.bits; //--- if (state.mode === TYPE) { state.back = -1; } break; } state.back = 0; for (;;) { here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ here_bits = here >>> 24; here_op = (here >>> 16) & 0xff; here_val = here & 0xffff; if (here_bits <= bits) { break; } //--- PULLBYTE() ---// if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; //---// } if (here_op && (here_op & 0xf0) === 0) { last_bits = here_bits; last_op = here_op; last_val = here_val; for (;;) { here = state.lencode[last_val + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; here_bits = here >>> 24; here_op = (here >>> 16) & 0xff; here_val = here & 0xffff; if ((last_bits + here_bits) <= bits) { break; } //--- PULLBYTE() ---// if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; //---// } //--- DROPBITS(last.bits) ---// hold >>>= last_bits; bits -= last_bits; //---// state.back += last_bits; } //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// state.back += here_bits; state.length = here_val; if (here_op === 0) { //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
// "inflate: literal '%c'\n" : // "inflate: literal 0x%02x\n", here.val)); state.mode = LIT; break; } if (here_op & 32) { //Tracevv((stderr, "inflate: end of block\n")); state.back = -1; state.mode = TYPE; break; } if (here_op & 64) { strm.msg = 'invalid literal/length code'; state.mode = BAD; break; } state.extra = here_op & 15; state.mode = LENEXT; /* falls through */ case LENEXT: if (state.extra) { //=== NEEDBITS(state.extra); n = state.extra; while (bits < n) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; //--- DROPBITS(state.extra) ---// hold >>>= state.extra; bits -= state.extra; //---// state.back += state.extra; } //Tracevv((stderr, "inflate: length %u\n", state.length)); state.was = state.length; state.mode = DIST; /* falls through */ case DIST: for (;;) { here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ here_bits = here >>> 24; here_op = (here >>> 16) & 0xff; here_val = here & 0xffff; if ((here_bits) <= bits) { break; } //--- PULLBYTE() ---// if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; //---// } if ((here_op & 0xf0) === 0) { last_bits = here_bits; last_op = here_op; last_val = here_val; for (;;) { here = state.distcode[last_val + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; here_bits = here >>> 24; here_op = (here >>> 16) & 0xff; here_val = here & 0xffff; if ((last_bits + here_bits) <= bits) { break; } //--- PULLBYTE() ---// if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; //---// } //--- DROPBITS(last.bits) ---// hold >>>= last_bits; bits -= last_bits; //---// state.back += last_bits; } //--- DROPBITS(here.bits) ---// hold >>>= here_bits; bits -= here_bits; //---// state.back += here_bits; if (here_op & 64) { strm.msg = 'invalid distance code'; state.mode = BAD; break; } state.offset = here_val; state.extra = (here_op) & 15; state.mode = DISTEXT; /* falls through */ case DISTEXT: if (state.extra) { //=== NEEDBITS(state.extra); n = state.extra; while (bits < n) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; //--- DROPBITS(state.extra) ---// hold >>>= state.extra; bits -= state.extra; //---// state.back += state.extra; } //#ifdef INFLATE_STRICT if (state.offset > state.dmax) { strm.msg = 'invalid distance too far back'; state.mode = BAD; break; } //#endif //Tracevv((stderr, "inflate: distance %u\n", state.offset)); state.mode = MATCH; /* falls through */ case MATCH: if (left === 0) { break inf_leave; } copy = _out - left; if (state.offset > copy) { /* copy from window */ copy = state.offset - copy; if (copy > state.whave) { if (state.sane) { strm.msg = 'invalid distance too far back'; state.mode = BAD; break; } // (!) 
This block is disabled in zlib defaults, // don't enable it for binary compatibility //#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR // Trace((stderr, "inflate.c too far\n")); // copy -= state.whave; // if (copy > state.length) { copy = state.length; } // if (copy > left) { copy = left; } // left -= copy; // state.length -= copy; // do { // output[put++] = 0; // } while (--copy); // if (state.length === 0) { state.mode = LEN; } // break; //#endif } if (copy > state.wnext) { copy -= state.wnext; from = state.wsize - copy; } else { from = state.wnext - copy; } if (copy > state.length) { copy = state.length; } from_source = state.window; } else { /* copy from output */ from_source = output; from = put - state.offset; copy = state.length; } if (copy > left) { copy = left; } left -= copy; state.length -= copy; do { output[put++] = from_source[from++]; } while (--copy); if (state.length === 0) { state.mode = LEN; } break; case LIT: if (left === 0) { break inf_leave; } output[put++] = state.length; left--; state.mode = LEN; break; case CHECK: if (state.wrap) { //=== NEEDBITS(32); while (bits < 32) { if (have === 0) { break inf_leave; } have--; // Use '|' instead of '+' to make sure that result is signed hold |= input[next++] << bits; bits += 8; } //===// _out -= left; strm.total_out += _out; state.total += _out; if (_out) { strm.adler = state.check = /*UPDATE(state.check, put - _out, _out);*/ (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); } _out = left; // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too if ((state.flags ? hold : zswap32(hold)) !== state.check) { strm.msg = 'incorrect data check'; state.mode = BAD; break; } //=== INITBITS(); hold = 0; bits = 0; //===// //Tracev((stderr, "inflate: check matches trailer\n")); } state.mode = LENGTH; /* falls through */ case LENGTH: if (state.wrap && state.flags) { //=== NEEDBITS(32); while (bits < 32) { if (have === 0) { break inf_leave; } have--; hold += input[next++] << bits; bits += 8; } //===// if (hold !== (state.total & 0xffffffff)) { strm.msg = 'incorrect length check'; state.mode = BAD; break; } //=== INITBITS(); hold = 0; bits = 0; //===// //Tracev((stderr, "inflate: length matches trailer\n")); } state.mode = DONE; /* falls through */ case DONE: ret = Z_STREAM_END; break inf_leave; case BAD: ret = Z_DATA_ERROR; break inf_leave; case MEM: return Z_MEM_ERROR; case SYNC: /* falls through */ default: return Z_STREAM_ERROR; } } // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" /* Return from inflate(), updating the total counts and the check value. If there was no progress during the inflate() call, return a buffer error. Call updatewindow() to create and/or update the window state. Note: a memory error from inflate() is non-recoverable. */ //--- RESTORE() --- strm.next_out = put; strm.avail_out = left; strm.next_in = next; strm.avail_in = have; state.hold = hold; state.bits = bits; //--- if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && (state.mode < CHECK || flush !== Z_FINISH))) { if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { state.mode = MEM; return Z_MEM_ERROR; } } _in -= strm.avail_in; _out -= strm.avail_out; strm.total_in += _in; strm.total_out += _out; state.total += _out; if (state.wrap && _out) { strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ (state.flags ? 
crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); } strm.data_type = state.bits + (state.last ? 64 : 0) + (state.mode === TYPE ? 128 : 0) + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { ret = Z_BUF_ERROR; } return ret; } function inflateEnd(strm) { if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { return Z_STREAM_ERROR; } var state = strm.state; if (state.window) { state.window = null; } strm.state = null; return Z_OK; } function inflateGetHeader(strm, head) { var state; /* check state */ if (!strm || !strm.state) { return Z_STREAM_ERROR; } state = strm.state; if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } /* save header structure */ state.head = head; head.done = false; return Z_OK; } function inflateSetDictionary(strm, dictionary) { var dictLength = dictionary.length; var state; var dictid; var ret; /* check state */ if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } state = strm.state; if (state.wrap !== 0 && state.mode !== DICT) { return Z_STREAM_ERROR; } /* check for correct dictionary identifier */ if (state.mode === DICT) { dictid = 1; /* adler32(0, null, 0)*/ /* dictid = adler32(dictid, dictionary, dictLength); */ dictid = adler32(dictid, dictionary, dictLength, 0); if (dictid !== state.check) { return Z_DATA_ERROR; } } /* copy dictionary to window using updatewindow(), which will amend the existing dictionary if appropriate */ ret = updatewindow(strm, dictionary, dictLength, dictLength); if (ret) { state.mode = MEM; return Z_MEM_ERROR; } state.havedict = 1; // Tracev((stderr, "inflate: dictionary set\n")); return Z_OK; } exports.inflateReset = inflateReset; exports.inflateReset2 = inflateReset2; exports.inflateResetKeep = inflateResetKeep; exports.inflateInit = inflateInit; exports.inflateInit2 = inflateInit2; exports.inflate = inflate; exports.inflateEnd = inflateEnd; exports.inflateGetHeader = inflateGetHeader; exports.inflateSetDictionary = inflateSetDictionary; exports.inflateInfo = 'pako inflate (from Nodeca project)'; /* Not implemented exports.inflateCopy = inflateCopy; exports.inflateGetDictionary = inflateGetDictionary; exports.inflateMark = inflateMark; exports.inflatePrime = inflatePrime; exports.inflateSync = inflateSync; exports.inflateSyncPoint = inflateSyncPoint; exports.inflateUndermine = inflateUndermine; */ },{"../utils/common":82,"./adler32":84,"./crc32":86,"./inffast":89,"./inftrees":91}],91:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. 
This notice may not be removed or altered from any source distribution. var utils = _dereq_('../utils/common'); var MAXBITS = 15; var ENOUGH_LENS = 852; var ENOUGH_DISTS = 592; //var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); var CODES = 0; var LENS = 1; var DISTS = 2; var lbase = [ /* Length codes 257..285 base */ 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 ]; var lext = [ /* Length codes 257..285 extra */ 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 ]; var dbase = [ /* Distance codes 0..29 base */ 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, 8193, 12289, 16385, 24577, 0, 0 ]; var dext = [ /* Distance codes 0..29 extra */ 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 64, 64 ]; module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) { var bits = opts.bits; //here = opts.here; /* table entry for duplication */ var len = 0; /* a code's length in bits */ var sym = 0; /* index of code symbols */ var min = 0, max = 0; /* minimum and maximum code lengths */ var root = 0; /* number of index bits for root table */ var curr = 0; /* number of index bits for current table */ var drop = 0; /* code bits to drop for sub-table */ var left = 0; /* number of prefix codes available */ var used = 0; /* code entries in table used */ var huff = 0; /* Huffman code */ var incr; /* for incrementing code, index */ var fill; /* index for replicating entries */ var low; /* low bits for current root entry */ var mask; /* mask for low root bits */ var next; /* next available space in table */ var base = null; /* base value table to use */ var base_index = 0; // var shoextra; /* extra bits table to use */ var end; /* use base and extra for symbol > end */ var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ var extra = null; var extra_index = 0; var here_bits, here_op, here_val; /* Process a set of code lengths to create a canonical Huffman code. The code lengths are lens[0..codes-1]. Each length corresponds to the symbols 0..codes-1. The Huffman code is generated by first sorting the symbols by length from short to long, and retaining the symbol order for codes with equal lengths. Then the code starts with all zero bits for the first code of the shortest length, and the codes are integer increments for the same length, and zeros are appended as the length increases. For the deflate format, these bits are stored backwards from their more natural integer increment ordering, and so when the decoding tables are built in the large loop below, the integer codes are incremented backwards. This routine assumes, but does not check, that all of the entries in lens[] are in the range 0..MAXBITS. The caller must assure this. 1..MAXBITS is interpreted as that code length. zero means that that symbol does not occur in this code. The codes are sorted by computing a count of codes for each length, creating from that a table of starting indices for each length in the sorted table, and then entering the symbols in order in the sorted table. The sorted table is work[], with that space being provided by the caller. 
The length counts are used for other purposes as well, i.e. finding the minimum and maximum length codes, determining if there are any codes at all, checking for a valid set of lengths, and looking ahead at length counts to determine sub-table sizes when building the decoding tables. */ /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ for (len = 0; len <= MAXBITS; len++) { count[len] = 0; } for (sym = 0; sym < codes; sym++) { count[lens[lens_index + sym]]++; } /* bound code lengths, force root to be within code lengths */ root = bits; for (max = MAXBITS; max >= 1; max--) { if (count[max] !== 0) { break; } } if (root > max) { root = max; } if (max === 0) { /* no symbols to code at all */ //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ //table.bits[opts.table_index] = 1; //here.bits = (var char)1; //table.val[opts.table_index++] = 0; //here.val = (var short)0; table[table_index++] = (1 << 24) | (64 << 16) | 0; //table.op[opts.table_index] = 64; //table.bits[opts.table_index] = 1; //table.val[opts.table_index++] = 0; table[table_index++] = (1 << 24) | (64 << 16) | 0; opts.bits = 1; return 0; /* no symbols, but wait for decoding to report error */ } for (min = 1; min < max; min++) { if (count[min] !== 0) { break; } } if (root < min) { root = min; } /* check for an over-subscribed or incomplete set of lengths */ left = 1; for (len = 1; len <= MAXBITS; len++) { left <<= 1; left -= count[len]; if (left < 0) { return -1; } /* over-subscribed */ } if (left > 0 && (type === CODES || max !== 1)) { return -1; /* incomplete set */ } /* generate offsets into symbol table for each length for sorting */ offs[1] = 0; for (len = 1; len < MAXBITS; len++) { offs[len + 1] = offs[len] + count[len]; } /* sort symbols by length, by symbol order within each length */ for (sym = 0; sym < codes; sym++) { if (lens[lens_index + sym] !== 0) { work[offs[lens[lens_index + sym]]++] = sym; } } /* Create and fill in decoding tables. In this loop, the table being filled is at next and has curr index bits. The code being used is huff with length len. That code is converted to an index by dropping drop bits off of the bottom. For codes where len is less than drop + curr, those top drop + curr - len bits are incremented through all values to fill the table with replicated entries. root is the number of index bits for the root table. When len exceeds root, sub-tables are created pointed to by the root entry with an index of the low root bits of huff. This is saved in low to check for when a new sub-table should be started. drop is zero when the root table is being filled, and drop is root when sub-tables are being filled. When a new sub-table is needed, it is necessary to look ahead in the code lengths to determine what size sub-table is needed. The length counts are used for this, and so count[] is decremented as codes are entered in the tables. used keeps track of how many table entries have been allocated from the provided *table space. It is checked for LENS and DIST tables against the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in the initial root table size constants. See the comments in inftrees.h for more information. sym increments through all symbols, and the loop terminates when all codes of length max, i.e. all codes, have been processed. This routine permits incomplete codes, so another loop after this one fills in the rest of the decoding tables with invalid code markers. 
*/ /* set up for code type */ // poor man optimization - use if-else instead of switch, // to avoid deopts in old v8 if (type === CODES) { base = extra = work; /* dummy value--not used */ end = 19; } else if (type === LENS) { base = lbase; base_index -= 257; extra = lext; extra_index -= 257; end = 256; } else { /* DISTS */ base = dbase; extra = dext; end = -1; } /* initialize opts for loop */ huff = 0; /* starting code */ sym = 0; /* starting code symbol */ len = min; /* starting code length */ next = table_index; /* current table to fill in */ curr = root; /* current table index bits */ drop = 0; /* current bits to drop from code for index */ low = -1; /* trigger new sub-table when len > root */ used = 1 << root; /* use root table entries */ mask = used - 1; /* mask for comparing low */ /* check available table space */ if ((type === LENS && used > ENOUGH_LENS) || (type === DISTS && used > ENOUGH_DISTS)) { return 1; } /* process all codes and make table entries */ for (;;) { /* create table entry */ here_bits = len - drop; if (work[sym] < end) { here_op = 0; here_val = work[sym]; } else if (work[sym] > end) { here_op = extra[extra_index + work[sym]]; here_val = base[base_index + work[sym]]; } else { here_op = 32 + 64; /* end of block */ here_val = 0; } /* replicate for those indices with low len bits equal to huff */ incr = 1 << (len - drop); fill = 1 << curr; min = fill; /* save offset to next table */ do { fill -= incr; table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; } while (fill !== 0); /* backwards increment the len-bit code huff */ incr = 1 << (len - 1); while (huff & incr) { incr >>= 1; } if (incr !== 0) { huff &= incr - 1; huff += incr; } else { huff = 0; } /* go to next symbol, update count, len */ sym++; if (--count[len] === 0) { if (len === max) { break; } len = lens[lens_index + work[sym]]; } /* create new sub-table if needed */ if (len > root && (huff & mask) !== low) { /* if first time, transition to sub-tables */ if (drop === 0) { drop = root; } /* increment past last table */ next += min; /* here min is 1 << curr */ /* determine length of next table */ curr = len - drop; left = 1 << curr; while (curr + drop < max) { left -= count[curr + drop]; if (left <= 0) { break; } curr++; left <<= 1; } /* check for enough space */ used += 1 << curr; if ((type === LENS && used > ENOUGH_LENS) || (type === DISTS && used > ENOUGH_DISTS)) { return 1; } /* point entry in root table to sub-table */ low = huff & mask; /*table.op[low] = curr; table.bits[low] = root; table.val[low] = next - opts.table_index;*/ table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; } } /* fill in remaining table entry if code is incomplete (guaranteed to have at most one remaining entry, since if the code is incomplete, the maximum code length that was allowed to get this far is one bit) */ if (huff !== 0) { //table.op[next + huff] = 64; /* invalid code marker */ //table.bits[next + huff] = len - drop; //table.val[next + huff] = 0; table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; } /* set return parameters */ //opts.table_index += used; opts.bits = root; return 0; }; },{"../utils/common":82}],92:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. 
// // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. module.exports = { 2: 'need dictionary', /* Z_NEED_DICT 2 */ 1: 'stream end', /* Z_STREAM_END 1 */ 0: '', /* Z_OK 0 */ '-1': 'file error', /* Z_ERRNO (-1) */ '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ '-3': 'data error', /* Z_DATA_ERROR (-3) */ '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ }; },{}],93:[function(_dereq_,module,exports){ 'use strict'; // (C) 1995-2013 Jean-loup Gailly and Mark Adler // (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. var utils = _dereq_('../utils/common'); /* Public constants ==========================================================*/ /* ===========================================================================*/ //var Z_FILTERED = 1; //var Z_HUFFMAN_ONLY = 2; //var Z_RLE = 3; var Z_FIXED = 4; //var Z_DEFAULT_STRATEGY = 0; /* Possible values of the data_type field (though see inflate()) */ var Z_BINARY = 0; var Z_TEXT = 1; //var Z_ASCII = 1; // = Z_TEXT var Z_UNKNOWN = 2; /*============================================================================*/ function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } // From zutil.h var STORED_BLOCK = 0; var STATIC_TREES = 1; var DYN_TREES = 2; /* The three kinds of block type */ var MIN_MATCH = 3; var MAX_MATCH = 258; /* The minimum and maximum match lengths */ // From deflate.h /* =========================================================================== * Internal compression state. 
*/ var LENGTH_CODES = 29; /* number of length codes, not counting the special END_BLOCK code */ var LITERALS = 256; /* number of literal bytes 0..255 */ var L_CODES = LITERALS + 1 + LENGTH_CODES; /* number of Literal or Length codes, including the END_BLOCK code */ var D_CODES = 30; /* number of distance codes */ var BL_CODES = 19; /* number of codes used to transfer the bit lengths */ var HEAP_SIZE = 2 * L_CODES + 1; /* maximum heap size */ var MAX_BITS = 15; /* All codes must not exceed MAX_BITS bits */ var Buf_size = 16; /* size of bit buffer in bi_buf */ /* =========================================================================== * Constants */ var MAX_BL_BITS = 7; /* Bit length codes must not exceed MAX_BL_BITS bits */ var END_BLOCK = 256; /* end of block literal code */ var REP_3_6 = 16; /* repeat previous bit length 3-6 times (2 bits of repeat count) */ var REPZ_3_10 = 17; /* repeat a zero length 3-10 times (3 bits of repeat count) */ var REPZ_11_138 = 18; /* repeat a zero length 11-138 times (7 bits of repeat count) */ /* eslint-disable comma-spacing,array-bracket-spacing */ var extra_lbits = /* extra bits for each length code */ [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; var extra_dbits = /* extra bits for each distance code */ [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; var extra_blbits = /* extra bits for each bit length code */ [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; var bl_order = [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; /* eslint-enable comma-spacing,array-bracket-spacing */ /* The lengths of the bit length codes are sent in order of decreasing * probability, to avoid transmitting the lengths for unused bit length codes. */ /* =========================================================================== * Local data. These are initialized only once. */ // We pre-fill arrays with 0 to avoid uninitialized gaps var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ // !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 var static_ltree = new Array((L_CODES + 2) * 2); zero(static_ltree); /* The static literal tree. Since the bit lengths are imposed, there is no * need for the L_CODES extra codes used during heap construction. However * The codes 286 and 287 are needed to build a canonical tree (see _tr_init * below). */ var static_dtree = new Array(D_CODES * 2); zero(static_dtree); /* The static distance tree. (Actually a trivial tree since all codes use * 5 bits.) */ var _dist_code = new Array(DIST_CODE_LEN); zero(_dist_code); /* Distance codes. The first 256 values correspond to the distances * 3 .. 258, the last 256 values correspond to the top 8 bits of * the 15 bit distances. 
*/ var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); zero(_length_code); /* length code for each normalized match length (0 == MIN_MATCH) */ var base_length = new Array(LENGTH_CODES); zero(base_length); /* First normalized length for each code (0 = MIN_MATCH) */ var base_dist = new Array(D_CODES); zero(base_dist); /* First normalized distance for each code (0 = distance of 1) */ function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { this.static_tree = static_tree; /* static tree or NULL */ this.extra_bits = extra_bits; /* extra bits for each code or NULL */ this.extra_base = extra_base; /* base index for extra_bits */ this.elems = elems; /* max number of elements in the tree */ this.max_length = max_length; /* max bit length for the codes */ // show if `static_tree` has data or dummy - needed for monomorphic objects this.has_stree = static_tree && static_tree.length; } var static_l_desc; var static_d_desc; var static_bl_desc; function TreeDesc(dyn_tree, stat_desc) { this.dyn_tree = dyn_tree; /* the dynamic tree */ this.max_code = 0; /* largest code with non zero frequency */ this.stat_desc = stat_desc; /* the corresponding static tree */ } function d_code(dist) { return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; } /* =========================================================================== * Output a short LSB first on the stream. * IN assertion: there is enough room in pendingBuf. */ function put_short(s, w) { // put_byte(s, (uch)((w) & 0xff)); // put_byte(s, (uch)((ush)(w) >> 8)); s.pending_buf[s.pending++] = (w) & 0xff; s.pending_buf[s.pending++] = (w >>> 8) & 0xff; } /* =========================================================================== * Send a value on a given number of bits. * IN assertion: length <= 16 and value fits in length bits. */ function send_bits(s, value, length) { if (s.bi_valid > (Buf_size - length)) { s.bi_buf |= (value << s.bi_valid) & 0xffff; put_short(s, s.bi_buf); s.bi_buf = value >> (Buf_size - s.bi_valid); s.bi_valid += length - Buf_size; } else { s.bi_buf |= (value << s.bi_valid) & 0xffff; s.bi_valid += length; } } function send_code(s, c, tree) { send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); } /* =========================================================================== * Reverse the first len bits of a code, using straightforward code (a faster * method would use a table) * IN assertion: 1 <= len <= 15 */ function bi_reverse(code, len) { var res = 0; do { res |= code & 1; code >>>= 1; res <<= 1; } while (--len > 0); return res >>> 1; } /* =========================================================================== * Flush the bit buffer, keeping at most 7 bits in it. */ function bi_flush(s) { if (s.bi_valid === 16) { put_short(s, s.bi_buf); s.bi_buf = 0; s.bi_valid = 0; } else if (s.bi_valid >= 8) { s.pending_buf[s.pending++] = s.bi_buf & 0xff; s.bi_buf >>= 8; s.bi_valid -= 8; } } /* =========================================================================== * Compute the optimal bit lengths for a tree and update the total bit length * for the current block. * IN assertion: the fields freq and dad are set, heap[heap_max] and * above are the tree nodes sorted by increasing frequency. * OUT assertions: the field len is set to the optimal bit length, the * array bl_count contains the frequencies for each bit length. * The length opt_len is updated; static_len is also updated if stree is * not null. 
*/ function gen_bitlen(s, desc) // deflate_state *s; // tree_desc *desc; /* the tree descriptor */ { var tree = desc.dyn_tree; var max_code = desc.max_code; var stree = desc.stat_desc.static_tree; var has_stree = desc.stat_desc.has_stree; var extra = desc.stat_desc.extra_bits; var base = desc.stat_desc.extra_base; var max_length = desc.stat_desc.max_length; var h; /* heap index */ var n, m; /* iterate over the tree elements */ var bits; /* bit length */ var xbits; /* extra bits */ var f; /* frequency */ var overflow = 0; /* number of elements with bit length too large */ for (bits = 0; bits <= MAX_BITS; bits++) { s.bl_count[bits] = 0; } /* In a first pass, compute the optimal bit lengths (which may * overflow in the case of the bit length tree). */ tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { n = s.heap[h]; bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; if (bits > max_length) { bits = max_length; overflow++; } tree[n * 2 + 1]/*.Len*/ = bits; /* We overwrite tree[n].Dad which is no longer needed */ if (n > max_code) { continue; } /* not a leaf node */ s.bl_count[bits]++; xbits = 0; if (n >= base) { xbits = extra[n - base]; } f = tree[n * 2]/*.Freq*/; s.opt_len += f * (bits + xbits); if (has_stree) { s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); } } if (overflow === 0) { return; } // Trace((stderr,"\nbit length overflow\n")); /* This happens for example on obj2 and pic of the Calgary corpus */ /* Find the first bit length which could increase: */ do { bits = max_length - 1; while (s.bl_count[bits] === 0) { bits--; } s.bl_count[bits]--; /* move one leaf down the tree */ s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ s.bl_count[max_length]--; /* The brother of the overflow item also moves one step up, * but this does not affect bl_count[max_length] */ overflow -= 2; } while (overflow > 0); /* Now recompute all bit lengths, scanning in increasing frequency. * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all * lengths instead of fixing only the wrong ones. This idea is taken * from 'ar' written by Haruhiko Okumura.) */ for (bits = max_length; bits !== 0; bits--) { n = s.bl_count[bits]; while (n !== 0) { m = s.heap[--h]; if (m > max_code) { continue; } if (tree[m * 2 + 1]/*.Len*/ !== bits) { // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; tree[m * 2 + 1]/*.Len*/ = bits; } n--; } } } /* =========================================================================== * Generate the codes for a given tree and bit counts (which need not be * optimal). * IN assertion: the array bl_count contains the bit length statistics for * the given tree and the field len is set for all tree elements. * OUT assertion: the field code is set for all tree elements of non * zero code length. */ function gen_codes(tree, max_code, bl_count) // ct_data *tree; /* the tree to decorate */ // int max_code; /* largest code with non zero frequency */ // ushf *bl_count; /* number of codes at each bit length */ { var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ var code = 0; /* running code value */ var bits; /* bit index */ var n; /* code index */ /* The distribution counts are first used to generate the code values * without bit reversal. 
*/
  for (bits = 1; bits <= MAX_BITS; bits++) {
    next_code[bits] = code = (code + bl_count[bits - 1]) << 1;
  }
  /* Check that the bit counts in bl_count are consistent. The last code
   * must be all ones.
   */
  //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, "inconsistent bit counts");

  /* Assign the (bit-reversed) code to every symbol of non-zero length. */
  for (n = 0; n <= max_code; n++) {
    var len = tree[n * 2 + 1]/*.Len*/;
    if (len === 0) { continue; }
    /* Now reverse the bits */
    tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len);
  }
}


/* ===========================================================================
 * Initialize the various 'constant' tables.
 */
function tr_static_init() {
  var n;      /* iterates over tree elements */
  var bits;   /* bit counter */
  var length; /* length value */
  var code;   /* code value */
  var dist;   /* distance index */
  var bl_count = new Array(MAX_BITS + 1);
  /* number of codes at each bit length for an optimal tree */

  // do check in _tr_init()
  //if (static_init_done) return;

  /* Initialize the mapping length (0..255) -> length code (0..28) */
  length = 0;
  for (code = 0; code < LENGTH_CODES - 1; code++) {
    base_length[code] = length;
    for (n = 0; n < (1 << extra_lbits[code]); n++) { _length_code[length++] = code; }
  }
  //Assert (length == 256, "tr_static_init: length != 256");
  /* Note that the length 255 (match length 258) can be represented
   * in two different ways: code 284 + 5 bits or code 285, so we
   * overwrite length_code[255] to use the best encoding:
   */
  _length_code[length - 1] = code;

  /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
  dist = 0;
  for (code = 0; code < 16; code++) {
    base_dist[code] = dist;
    for (n = 0; n < (1 << extra_dbits[code]); n++) { _dist_code[dist++] = code; }
  }
  //Assert (dist == 256, "tr_static_init: dist != 256");
  dist >>= 7; /* from now on, all distances are divided by 128 */
  for (; code < D_CODES; code++) {
    base_dist[code] = dist << 7;
    for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { _dist_code[256 + dist++] = code; }
  }
  //Assert (dist == 256, "tr_static_init: 256+dist != 512");

  /* Construct the codes of the static literal tree */
  for (bits = 0; bits <= MAX_BITS; bits++) { bl_count[bits] = 0; }
  n = 0;
  while (n <= 143) { static_ltree[n * 2 + 1]/*.Len*/ = 8; n++; bl_count[8]++; }
  while (n <= 255) { static_ltree[n * 2 + 1]/*.Len*/ = 9; n++; bl_count[9]++; }
  while (n <= 279) { static_ltree[n * 2 + 1]/*.Len*/ = 7; n++; bl_count[7]++; }
  while (n <= 287) { static_ltree[n * 2 + 1]/*.Len*/ = 8; n++; bl_count[8]++; }
  /* Codes 286 and 287 do not exist, but we must include them in the
   * tree construction to get a canonical Huffman tree (longest code
   * all ones)
   */
  gen_codes(static_ltree, L_CODES + 1, bl_count);

  /* The static distance tree is trivial: */
  for (n = 0; n < D_CODES; n++) {
    static_dtree[n * 2 + 1]/*.Len*/ = 5;
    static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5);
  }

  // Now data ready and we can init static trees
  static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS);
  static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS);
  static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS);

  //static_init_done = true;
}


/* ===========================================================================
 * Initialize a new block.
 */
function init_block(s) {
  var n; /* iterates over tree elements */

  /* Initialize the trees. */
  for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; }
  for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; }
  for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; }

  s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1;
  s.opt_len = s.static_len = 0;
  s.last_lit = s.matches = 0;
}


/* ===========================================================================
 * Flush the bit buffer and align the output on a byte boundary
 */
function bi_windup(s) {
  if (s.bi_valid > 8) {
    put_short(s, s.bi_buf);
  } else if (s.bi_valid > 0) {
    //put_byte(s, (Byte)s->bi_buf);
    s.pending_buf[s.pending++] = s.bi_buf;
  }
  s.bi_buf = 0;
  s.bi_valid = 0;
}


/* ===========================================================================
 * Copy a stored block, storing first the length and its
 * one's complement if requested.
*/ function copy_block(s, buf, len, header) //DeflateState *s; //charf *buf; /* the input data */ //unsigned len; /* its length */ //int header; /* true if block header must be written */ { bi_windup(s); /* align on byte boundary */ if (header) { put_short(s, len); put_short(s, ~len); } // while (len--) { // put_byte(s, *buf++); // } utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); s.pending += len; } /* =========================================================================== * Compares to subtrees, using the tree depth as tie breaker when * the subtrees have equal frequency. This minimizes the worst case length. */ function smaller(tree, n, m, depth) { var _n2 = n * 2; var _m2 = m * 2; return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); } /* =========================================================================== * Restore the heap property by moving down the tree starting at node k, * exchanging a node with the smallest of its two sons if necessary, stopping * when the heap property is re-established (each father smaller than its * two sons). */ function pqdownheap(s, tree, k) // deflate_state *s; // ct_data *tree; /* the tree to restore */ // int k; /* node to move down */ { var v = s.heap[k]; var j = k << 1; /* left son of k */ while (j <= s.heap_len) { /* Set j to the smallest of the two sons: */ if (j < s.heap_len && smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { j++; } /* Exit if v is smaller than both sons */ if (smaller(tree, v, s.heap[j], s.depth)) { break; } /* Exchange v with the smallest son */ s.heap[k] = s.heap[j]; k = j; /* And continue down the tree, setting j to the left son of k */ j <<= 1; } s.heap[k] = v; } // inlined manually // var SMALLEST = 1; /* =========================================================================== * Send the block data compressed using the given Huffman trees */ function compress_block(s, ltree, dtree) // deflate_state *s; // const ct_data *ltree; /* literal tree */ // const ct_data *dtree; /* distance tree */ { var dist; /* distance of matched string */ var lc; /* match length or unmatched char (if dist == 0) */ var lx = 0; /* running index in l_buf */ var code; /* the code to send */ var extra; /* number of extra bits to send */ if (s.last_lit !== 0) { do { dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); lc = s.pending_buf[s.l_buf + lx]; lx++; if (dist === 0) { send_code(s, lc, ltree); /* send a literal byte */ //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); } else { /* Here, lc is the match length - MIN_MATCH */ code = _length_code[lc]; send_code(s, code + LITERALS + 1, ltree); /* send the length code */ extra = extra_lbits[code]; if (extra !== 0) { lc -= base_length[code]; send_bits(s, lc, extra); /* send the extra length bits */ } dist--; /* dist is now the match distance - 1 */ code = d_code(dist); //Assert (code < D_CODES, "bad d_code"); send_code(s, code, dtree); /* send the distance code */ extra = extra_dbits[code]; if (extra !== 0) { dist -= base_dist[code]; send_bits(s, dist, extra); /* send the extra distance bits */ } } /* literal or match pair ? 
*/ /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, // "pendingBuf overflow"); } while (lx < s.last_lit); } send_code(s, END_BLOCK, ltree); } /* =========================================================================== * Construct one Huffman tree and assigns the code bit strings and lengths. * Update the total bit length for the current block. * IN assertion: the field freq is set for all tree elements. * OUT assertions: the fields len and code are set to the optimal bit length * and corresponding code. The length opt_len is updated; static_len is * also updated if stree is not null. The field max_code is set. */ function build_tree(s, desc) // deflate_state *s; // tree_desc *desc; /* the tree descriptor */ { var tree = desc.dyn_tree; var stree = desc.stat_desc.static_tree; var has_stree = desc.stat_desc.has_stree; var elems = desc.stat_desc.elems; var n, m; /* iterate over heap elements */ var max_code = -1; /* largest code with non zero frequency */ var node; /* new node being created */ /* Construct the initial heap, with least frequent element in * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. * heap[0] is not used. */ s.heap_len = 0; s.heap_max = HEAP_SIZE; for (n = 0; n < elems; n++) { if (tree[n * 2]/*.Freq*/ !== 0) { s.heap[++s.heap_len] = max_code = n; s.depth[n] = 0; } else { tree[n * 2 + 1]/*.Len*/ = 0; } } /* The pkzip format requires that at least one distance code exists, * and that at least one bit should be sent even if there is only one * possible code. So to avoid special checks later on we force at least * two codes of non zero frequency. */ while (s.heap_len < 2) { node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); tree[node * 2]/*.Freq*/ = 1; s.depth[node] = 0; s.opt_len--; if (has_stree) { s.static_len -= stree[node * 2 + 1]/*.Len*/; } /* node is 0 or 1 so it does not have extra bits */ } desc.max_code = max_code; /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, * establish sub-heaps of increasing lengths: */ for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } /* Construct the Huffman tree by repeatedly combining the least two * frequent nodes. */ node = elems; /* next internal node of the tree */ do { //pqremove(s, tree, n); /* n = node of least frequency */ /*** pqremove ***/ n = s.heap[1/*SMALLEST*/]; s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; pqdownheap(s, tree, 1/*SMALLEST*/); /***/ m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ s.heap[--s.heap_max] = m; /* Create a new node father of n and m */ tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; /* and insert the new node in the heap */ s.heap[1/*SMALLEST*/] = node++; pqdownheap(s, tree, 1/*SMALLEST*/); } while (s.heap_len >= 2); s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; /* At this point, the fields freq and dad are set. We can now * generate the bit lengths. */ gen_bitlen(s, desc); /* The field len is now set, we can generate the bit codes */ gen_codes(tree, max_code, s.bl_count); } /* =========================================================================== * Scan a literal or distance tree to determine the frequencies of the codes * in the bit length tree. 
*/ function scan_tree(s, tree, max_code) // deflate_state *s; // ct_data *tree; /* the tree to be scanned */ // int max_code; /* and its largest code of non zero frequency */ { var n; /* iterates over all tree elements */ var prevlen = -1; /* last emitted length */ var curlen; /* length of current code */ var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ var count = 0; /* repeat count of the current code */ var max_count = 7; /* max repeat count */ var min_count = 4; /* min repeat count */ if (nextlen === 0) { max_count = 138; min_count = 3; } tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ for (n = 0; n <= max_code; n++) { curlen = nextlen; nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; if (++count < max_count && curlen === nextlen) { continue; } else if (count < min_count) { s.bl_tree[curlen * 2]/*.Freq*/ += count; } else if (curlen !== 0) { if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } s.bl_tree[REP_3_6 * 2]/*.Freq*/++; } else if (count <= 10) { s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; } else { s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; } count = 0; prevlen = curlen; if (nextlen === 0) { max_count = 138; min_count = 3; } else if (curlen === nextlen) { max_count = 6; min_count = 3; } else { max_count = 7; min_count = 4; } } } /* =========================================================================== * Send a literal or distance tree in compressed form, using the codes in * bl_tree. */ function send_tree(s, tree, max_code) // deflate_state *s; // ct_data *tree; /* the tree to be scanned */ // int max_code; /* and its largest code of non zero frequency */ { var n; /* iterates over all tree elements */ var prevlen = -1; /* last emitted length */ var curlen; /* length of current code */ var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ var count = 0; /* repeat count of the current code */ var max_count = 7; /* max repeat count */ var min_count = 4; /* min repeat count */ /* tree[max_code+1].Len = -1; */ /* guard already set */ if (nextlen === 0) { max_count = 138; min_count = 3; } for (n = 0; n <= max_code; n++) { curlen = nextlen; nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; if (++count < max_count && curlen === nextlen) { continue; } else if (count < min_count) { do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); } else if (curlen !== 0) { if (curlen !== prevlen) { send_code(s, curlen, s.bl_tree); count--; } //Assert(count >= 3 && count <= 6, " 3_6?"); send_code(s, REP_3_6, s.bl_tree); send_bits(s, count - 3, 2); } else if (count <= 10) { send_code(s, REPZ_3_10, s.bl_tree); send_bits(s, count - 3, 3); } else { send_code(s, REPZ_11_138, s.bl_tree); send_bits(s, count - 11, 7); } count = 0; prevlen = curlen; if (nextlen === 0) { max_count = 138; min_count = 3; } else if (curlen === nextlen) { max_count = 6; min_count = 3; } else { max_count = 7; min_count = 4; } } } /* =========================================================================== * Construct the Huffman tree for the bit lengths and return the index in * bl_order of the last bit length code to send. */ function build_bl_tree(s) { var max_blindex; /* index of last bit length code of non zero freq */ /* Determine the bit length frequencies for literal and distance trees */ scan_tree(s, s.dyn_ltree, s.l_desc.max_code); scan_tree(s, s.dyn_dtree, s.d_desc.max_code); /* Build the bit length tree: */ build_tree(s, s.bl_desc); /* opt_len now includes the length of the tree representations, except * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. 
*/ /* Determine the number of bit length codes to send. The pkzip format * requires that at least 4 bit length codes be sent. (appnote.txt says * 3 but the actual value used is 4.) */ for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { break; } } /* Update opt_len to include the bit length tree and counts */ s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", // s->opt_len, s->static_len)); return max_blindex; } /* =========================================================================== * Send the header for a block using dynamic Huffman trees: the counts, the * lengths of the bit length codes, the literal tree and the distance tree. * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. */ function send_all_trees(s, lcodes, dcodes, blcodes) // deflate_state *s; // int lcodes, dcodes, blcodes; /* number of codes for each tree */ { var rank; /* index in bl_order */ //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, // "too many codes"); //Tracev((stderr, "\nbl counts: ")); send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ send_bits(s, dcodes - 1, 5); send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ for (rank = 0; rank < blcodes; rank++) { //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); } //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); } /* =========================================================================== * Check if the data type is TEXT or BINARY, using the following algorithm: * - TEXT if the two conditions below are satisfied: * a) There are no non-portable control characters belonging to the * "black list" (0..6, 14..25, 28..31). * b) There is at least one printable character belonging to the * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). * - BINARY otherwise. * - The following partially-portable control characters form a * "gray list" that is ignored in this detection algorithm: * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). * IN assertion: the fields Freq of dyn_ltree are set. */ function detect_data_type(s) { /* black_mask is the bit mask of black-listed bytes * set bits 0..6, 14..25, and 28..31 * 0xf3ffc07f = binary 11110011111111111100000001111111 */ var black_mask = 0xf3ffc07f; var n; /* Check for non-textual ("black-listed") bytes. */ for (n = 0; n <= 31; n++, black_mask >>>= 1) { if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { return Z_BINARY; } } /* Check for textual ("white-listed") bytes. */ if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { return Z_TEXT; } for (n = 32; n < LITERALS; n++) { if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { return Z_TEXT; } } /* There are no "black-listed" or "white-listed" bytes: * this stream either is empty or has tolerated ("gray-listed") bytes only. */ return Z_BINARY; } var static_init_done = false; /* =========================================================================== * Initialize the tree data structures for a new zlib stream. 
*/ function _tr_init(s) { if (!static_init_done) { tr_static_init(); static_init_done = true; } s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); s.bi_buf = 0; s.bi_valid = 0; /* Initialize the first block of the first file: */ init_block(s); } /* =========================================================================== * Send a stored block */ function _tr_stored_block(s, buf, stored_len, last) //DeflateState *s; //charf *buf; /* input block */ //ulg stored_len; /* length of input block */ //int last; /* one if this is the last block for a file */ { send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ copy_block(s, buf, stored_len, true); /* with header */ } /* =========================================================================== * Send one empty static block to give enough lookahead for inflate. * This takes 10 bits, of which 7 may remain in the bit buffer. */ function _tr_align(s) { send_bits(s, STATIC_TREES << 1, 3); send_code(s, END_BLOCK, static_ltree); bi_flush(s); } /* =========================================================================== * Determine the best encoding for the current block: dynamic trees, static * trees or store, and output the encoded block to the zip file. */ function _tr_flush_block(s, buf, stored_len, last) //DeflateState *s; //charf *buf; /* input block, or NULL if too old */ //ulg stored_len; /* length of input block */ //int last; /* one if this is the last block for a file */ { var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ var max_blindex = 0; /* index of last bit length code of non zero freq */ /* Build the Huffman trees unless a stored block is forced */ if (s.level > 0) { /* Check if the file is binary or text */ if (s.strm.data_type === Z_UNKNOWN) { s.strm.data_type = detect_data_type(s); } /* Construct the literal and distance trees */ build_tree(s, s.l_desc); // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, // s->static_len)); build_tree(s, s.d_desc); // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, // s->static_len)); /* At this point, opt_len and static_len are the total bit lengths of * the compressed block data, excluding the tree representations. */ /* Build the bit length tree for the above two trees, and get the index * in bl_order of the last bit length code to send. */ max_blindex = build_bl_tree(s); /* Determine the best encoding. Compute the block lengths in bytes. */ opt_lenb = (s.opt_len + 3 + 7) >>> 3; static_lenb = (s.static_len + 3 + 7) >>> 3; // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, // s->last_lit)); if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } } else { // Assert(buf != (char*)0, "lost buf"); opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ } if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { /* 4: two words for the lengths */ /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. * Otherwise we can't have processed more than WSIZE input bytes since * the last block flush, because compression would have been * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to * transform a block into a stored block. */ _tr_stored_block(s, buf, stored_len, last); } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { send_bits(s, (STATIC_TREES << 1) + (last ? 
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
 */
function _tr_flush_block(s, buf, stored_len, last)
//DeflateState *s;
//charf *buf;       /* input block, or NULL if too old */
//ulg stored_len;   /* length of input block */
//int last;         /* one if this is the last block for a file */
{
  var opt_lenb, static_lenb;  /* opt_len and static_len in bytes */
  var max_blindex = 0;        /* index of last bit length code of non zero freq */

  /* Build the Huffman trees unless a stored block is forced */
  if (s.level > 0) {

    /* Check if the file is binary or text */
    if (s.strm.data_type === Z_UNKNOWN) {
      s.strm.data_type = detect_data_type(s);
    }

    /* Construct the literal and distance trees */
    build_tree(s, s.l_desc);
    // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
    //        s->static_len));

    build_tree(s, s.d_desc);
    // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
    //        s->static_len));
    /* At this point, opt_len and static_len are the total bit lengths of
     * the compressed block data, excluding the tree representations.
     */

    /* Build the bit length tree for the above two trees, and get the index
     * in bl_order of the last bit length code to send.
     */
    max_blindex = build_bl_tree(s);

    /* Determine the best encoding. Compute the block lengths in bytes. */
    opt_lenb = (s.opt_len + 3 + 7) >>> 3;
    static_lenb = (s.static_len + 3 + 7) >>> 3;

    // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
    //        opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
    //        s->last_lit));

    if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; }

  } else {
    // Assert(buf != (char*)0, "lost buf");
    opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
  }

  if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) {
    /* 4: two words for the lengths */

    /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
     * Otherwise we can't have processed more than WSIZE input bytes since
     * the last block flush, because compression would have been
     * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
     * transform a block into a stored block.
     */
    _tr_stored_block(s, buf, stored_len, last);

  } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) {

    send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3);
    compress_block(s, static_ltree, static_dtree);

  } else {
    send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3);
    send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1);
    compress_block(s, s.dyn_ltree, s.dyn_dtree);
  }
  // Assert (s->compressed_len == s->bits_sent, "bad compressed size");
  /* The above check is made mod 2^32, for files larger than 512 MB
   * and uLong implemented on 32 bits.
   */
  init_block(s);

  if (last) {
    bi_windup(s);
  }
  // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
  //       s->compressed_len-7*last));
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
function _tr_tally(s, dist, lc)
//    deflate_state *s;
//    unsigned dist;  /* distance of matched string */
//    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
  //var out_length, in_length, dcode;

  s.pending_buf[s.d_buf + s.last_lit * 2]     = (dist >>> 8) & 0xff;
  s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff;

  s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff;
  s.last_lit++;

  if (dist === 0) {
    /* lc is the unmatched char */
    s.dyn_ltree[lc * 2]/*.Freq*/++;
  } else {
    s.matches++;
    /* Here, lc is the match length - MIN_MATCH */
    dist--;             /* dist = match distance - 1 */
    //Assert((ush)dist < (ush)MAX_DIST(s) &&
    //       (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
    //       (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

    s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++;
    s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++;
  }

  // (!) This block is disabled in zlib defaults,
  // don't enable it for binary compatibility

  //#ifdef TRUNCATE_BLOCK
  //  /* Try to guess if it is profitable to stop the current block here */
  //  if ((s.last_lit & 0x1fff) === 0 && s.level > 2) {
  //    /* Compute an upper bound for the compressed length */
  //    out_length = s.last_lit*8;
  //    in_length = s.strstart - s.block_start;
  //
  //    for (dcode = 0; dcode < D_CODES; dcode++) {
  //      out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]);
  //    }
  //    out_length >>>= 3;
  //    //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
  //    //       s->last_lit, in_length, out_length,
  //    //       100L - out_length*100L/in_length));
  //    if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) {
  //      return true;
  //    }
  //  }
  //#endif

  return (s.last_lit === s.lit_bufsize - 1);
  /* We avoid equality with lit_bufsize because of wraparound at 64K
   * on 16 bit machines and because stored blocks are restricted to
   * 64K-1 bytes.
   */
}

exports._tr_init = _tr_init;
exports._tr_stored_block = _tr_stored_block;
exports._tr_flush_block = _tr_flush_block;
exports._tr_tally = _tr_tally;
exports._tr_align = _tr_align;
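/* ---------------------------------------------------------------------------
 * Illustrative sketch only (an assumption; nothing in the bundle calls it):
 * the way deflate typically drives this module -- tally each literal or
 * (distance, length) pair with _tr_tally(), and flush when it reports that
 * the literal buffer is full. `flush_block_fn` is a stand-in name for the
 * caller's flush helper, which would eventually reach _tr_flush_block() above.
 */
function example_tally_literals(s, literals, flush_block_fn) {
  for (var i = 0; i < literals.length; i++) {
    // dist === 0 means "unmatched literal byte"
    if (_tr_tally(s, 0, literals[i])) {
      flush_block_fn(s, false);   // buffer full: emit a non-final block
    }
  }
}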
},{"../utils/common":82}],94:[function(_dereq_,module,exports){
'use strict';

// (C) 1995-2013 Jean-loup Gailly and Mark Adler
// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
//   claim that you wrote the original software. If you use this software
//   in a product, an acknowledgment in the product documentation would be
//   appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
//   misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.

function ZStream() {
  /* next input byte */
  this.input = null; // JS specific, because we have no pointers
  this.next_in = 0;
  /* number of bytes available at input */
  this.avail_in = 0;
  /* total number of input bytes read so far */
  this.total_in = 0;
  /* next output byte should be put there */
  this.output = null; // JS specific, because we have no pointers
  this.next_out = 0;
  /* remaining free space at output */
  this.avail_out = 0;
  /* total number of bytes output so far */
  this.total_out = 0;
  /* last error message, NULL if no error */
  this.msg = ''/*Z_NULL*/;
  /* not visible by applications */
  this.state = null;
  /* best guess about the data type: binary or text */
  this.data_type = 2/*Z_UNKNOWN*/;
  /* adler32 value of the uncompressed data */
  this.adler = 0;
}

module.exports = ZStream;
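/* ---------------------------------------------------------------------------
 * Illustrative sketch only (an assumption; this module never runs it): how a
 * ZStream is typically populated before being handed to pako's deflate or
 * inflate wrappers. The 16K output chunk size and the `chunk` parameter name
 * are made up for clarity.
 */
function exampleZStreamSetup(chunk) {
  var strm = new ZStream();
  strm.input = chunk;                   // Uint8Array of bytes to process
  strm.next_in = 0;                     // start reading at offset 0
  strm.avail_in = chunk.length;         // the whole input is available at once
  strm.output = new Uint8Array(16384);  // caller-chosen output chunk
  strm.next_out = 0;
  strm.avail_out = strm.output.length;  // free space left in `output`
  return strm;
}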
},{}],95:[function(_dereq_,module,exports){
// shim for using process in browser
var process = module.exports = {};

// cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.

var cachedSetTimeout;
var cachedClearTimeout;

function defaultSetTimout() {
  throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout() {
  throw new Error('clearTimeout has not been defined');
}
(function () {
  try {
    if (typeof setTimeout === 'function') {
      cachedSetTimeout = setTimeout;
    } else {
      cachedSetTimeout = defaultSetTimout;
    }
  } catch (e) {
    cachedSetTimeout = defaultSetTimout;
  }
  try {
    if (typeof clearTimeout === 'function') {
      cachedClearTimeout = clearTimeout;
    } else {
      cachedClearTimeout = defaultClearTimeout;
    }
  } catch (e) {
    cachedClearTimeout = defaultClearTimeout;
  }
}());

function runTimeout(fun) {
  if (cachedSetTimeout === setTimeout) {
    // normal environments in sane situations
    return setTimeout(fun, 0);
  }
  // if setTimeout wasn't available but was later defined
  if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
    cachedSetTimeout = setTimeout;
    return setTimeout(fun, 0);
  }
  try {
    // when somebody has screwed with setTimeout but no I.E. madness
    return cachedSetTimeout(fun, 0);
  } catch (e) {
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't
      // trust the global object when called normally
      return cachedSetTimeout.call(null, fun, 0);
    } catch (e) {
      // same as above but when it's a version of I.E. that must have the
      // global object for 'this', hopefully our context is correct,
      // otherwise it will throw a global error
      return cachedSetTimeout.call(this, fun, 0);
    }
  }
}

function runClearTimeout(marker) {
  if (cachedClearTimeout === clearTimeout) {
    // normal environments in sane situations
    return clearTimeout(marker);
  }
  // if clearTimeout wasn't available but was later defined
  if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
    cachedClearTimeout = clearTimeout;
    return clearTimeout(marker);
  }
  try {
    // when somebody has screwed with setTimeout but no I.E. madness
    return cachedClearTimeout(marker);
  } catch (e) {
    try {
      // When we are in I.E. but the script has been evaled so I.E. doesn't
      // trust the global object when called normally
      return cachedClearTimeout.call(null, marker);
    } catch (e) {
      // same as above but when it's a version of I.E. that must have the
      // global object for 'this', hopefully our context is correct,
      // otherwise it will throw a global error.
      // Some versions of I.E. have different rules for clearTimeout vs setTimeout
      return cachedClearTimeout.call(this, marker);
    }
  }
}

var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;

function cleanUpNextTick() {
  if (!draining || !currentQueue) {
    return;
  }
  draining = false;
  if (currentQueue.length) {
    queue = currentQueue.concat(queue);
  } else {
    queueIndex = -1;
  }
  if (queue.length) {
    drainQueue();
  }
}

function drainQueue() {
  if (draining) {
    return;
  }
  var timeout = runTimeout(cleanUpNextTick);
  draining = true;

  var len = queue.length;
  while (len) {
    currentQueue = queue;
    queue = [];
    while (++queueIndex < len) {
      if (currentQueue) {
        currentQueue[queueIndex].run();
      }
    }
    queueIndex = -1;
    len = queue.length;
  }
  currentQueue = null;
  draining = false;
  runClearTimeout(timeout);
}

process.nextTick = function (fun) {
  var args = new Array(arguments.length - 1);
  if (arguments.length > 1) {
    for (var i = 1; i < arguments.length; i++) {
      args[i - 1] = arguments[i];
    }
  }
  queue.push(new Item(fun, args));
  if (queue.length === 1 && !draining) {
    runTimeout(drainQueue);
  }
};

// v8 likes predictable objects
function Item(fun, array) {
  this.fun = fun;
  this.array = array;
}
Item.prototype.run = function () {
  this.fun.apply(null, this.array);
};

process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};

function noop() {}

process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;

process.listeners = function (name) { return []; };

process.binding = function (name) {
  throw new Error('process.binding is not supported');
};

process.cwd = function () { return '/'; };
process.chdir = function (dir) {
  throw new Error('process.chdir is not supported');
};
process.umask = function () { return 0; };

},{}]},{},[1]);
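// ---------------------------------------------------------------------------
// Illustrative usage of the process shim above (an assumption, shown only as
// a comment so the bundle's behaviour is unchanged): the shim's nextTick
// defers the callback through the cached setTimeout and forwards extra
// arguments, mirroring Node's API.
//
//   process.nextTick(function (a, b) {
//     console.log(a + b); // logs 3 on a later turn of the event loop
//   }, 1, 2);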