Initial

resources/app/node_modules/nedb/LICENSE (22 lines; generated, vendored, executable file)
@@ -0,0 +1,22 @@
(The MIT License)

Copyright (c) 2013 Louis Chatriot <louis.chatriot@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

resources/app/node_modules/nedb/benchmarks/commonUtilities.js (308 lines; generated, vendored, executable file)
@@ -0,0 +1,308 @@
/**
 * Functions that are used in several benchmark tests
 */

var customUtils = require('../lib/customUtils')
  , fs = require('fs')
  , path = require('path')
  , Datastore = require('../lib/datastore')
  , Persistence = require('../lib/persistence')
  , executeAsap   // process.nextTick or setImmediate depending on your Node version
  ;

try {
  executeAsap = setImmediate;
} catch (e) {
  executeAsap = process.nextTick;
}


/**
 * Configure the benchmark
 */
module.exports.getConfiguration = function (benchDb) {
  var d, n
    , program = require('commander')
    ;

  program
    .option('-n --number [number]', 'Size of the collection to test on', parseInt)
    .option('-i --with-index', 'Use an index')
    .option('-m --in-memory', 'Test with an in-memory only store')
    .parse(process.argv);

  n = program.number || 10000;

  console.log("----------------------------");
  console.log("Test with " + n + " documents");
  console.log(program.withIndex ? "Use an index" : "Don't use an index");
  console.log(program.inMemory ? "Use an in-memory datastore" : "Use a persistent datastore");
  console.log("----------------------------");

  d = new Datastore({ filename: benchDb
                    , inMemoryOnly: program.inMemory
                    });

  return { n: n, d: d, program: program };
};


/**
 * Ensure the workspace exists and the db datafile is empty
 */
module.exports.prepareDb = function (filename, cb) {
  Persistence.ensureDirectoryExists(path.dirname(filename), function () {
    fs.exists(filename, function (exists) {
      if (exists) {
        fs.unlink(filename, cb);
      } else { return cb(); }
    });
  });
};


/**
 * Return an array with the numbers from 0 to n-1, in a random order
 * Uses the Fisher-Yates shuffle algorithm
 * Useful to get fair tests
 */
function getRandomArray (n) {
  var res = []
    , i, j, temp
    ;

  for (i = 0; i < n; i += 1) { res[i] = i; }

  for (i = n - 1; i >= 1; i -= 1) {
    j = Math.floor((i + 1) * Math.random());
    temp = res[i];
    res[i] = res[j];
    res[j] = temp;
  }

  return res;
}
module.exports.getRandomArray = getRandomArray;


/**
 * Insert a certain number of documents for testing
 */
module.exports.insertDocs = function (d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step('Begin inserting ' + n + ' docs');

  function runFrom(i) {
    if (i === n) {   // Finished
      var opsPerSecond = Math.floor(1000 * n / profiler.elapsedSinceLastStep());
      console.log("===== RESULT (insert) ===== " + opsPerSecond + " ops/s");
      profiler.step('Finished inserting ' + n + ' docs');
      profiler.insertOpsPerSecond = opsPerSecond;
      return cb();
    }

    d.insert({ docNumber: order[i] }, function (err) {
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};


/**
 * Find documents with find
 */
module.exports.findDocs = function (d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step("Finding " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (find) ===== " + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.find({ docNumber: order[i] }, function (err, docs) {
      if (docs.length !== 1 || docs[0].docNumber !== order[i]) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};


/**
 * Find documents with find and the $in operator
 */
module.exports.findDocsWithIn = function (d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    , ins = [], i, j
    , arraySize = Math.min(10, n)   // The array for $in needs to be smaller than n (inclusive)
    ;

  // Preparing all the $in arrays, will take some time
  for (i = 0; i < n; i += 1) {
    ins[i] = [];

    for (j = 0; j < arraySize; j += 1) {
      ins[i].push((i + j) % n);
    }
  }

  profiler.step("Finding " + n + " documents WITH $IN OPERATOR");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (find with in selector) ===== " + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.find({ docNumber: { $in: ins[i] } }, function (err, docs) {
      if (docs.length !== arraySize) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};


/**
 * Find documents with findOne
 */
module.exports.findOneDocs = function (d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step("FindingOne " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (findOne) ===== " + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.findOne({ docNumber: order[i] }, function (err, doc) {
      if (!doc || doc.docNumber !== order[i]) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};


/**
 * Update documents
 * options is the same as the options object for update
 */
module.exports.updateDocs = function (options, d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step("Updating " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (update) ===== " + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished updating ' + n + ' docs');
      return cb();
    }

    // Will not actually modify the document but will take the same time
    d.update({ docNumber: order[i] }, { docNumber: order[i] }, options, function (err, nr) {
      if (err) { return cb(err); }
      if (nr !== 1) { return cb('One update didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};


/**
 * Remove documents
 * options is the same as the options object for remove
 */
module.exports.removeDocs = function (options, d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step("Removing " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      // opsPerSecond corresponds to 1 insert + 1 remove, needed to keep collection size at 10,000
      // We need to subtract the time taken by one insert to get the time actually taken by one remove
      var opsPerSecond = Math.floor(1000 * n / profiler.elapsedSinceLastStep());
      var removeOpsPerSecond = Math.floor(1 / ((1 / opsPerSecond) - (1 / profiler.insertOpsPerSecond)));
      console.log("===== RESULT (remove) ===== " + removeOpsPerSecond + " ops/s");
      profiler.step('Finished removing ' + n + ' docs');
      return cb();
    }

    d.remove({ docNumber: order[i] }, options, function (err, nr) {
      if (err) { return cb(err); }
      if (nr !== 1) { return cb('One remove didnt work'); }
      d.insert({ docNumber: order[i] }, function (err) {   // We need to reinsert the doc so that we keep the collection's size at n
        // So actually we're calculating the average time taken by one insert + one remove
        executeAsap(function () {
          runFrom(i + 1);
        });
      });
    });
  }
  runFrom(0);
};


/**
 * Load database
 */
module.exports.loadDatabase = function (d, n, profiler, cb) {
  var beg = new Date()
    , order = getRandomArray(n)
    ;

  profiler.step("Loading the database " + n + " times");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT ===== " + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished loading a database ' + n + ' times');
      return cb();
    }

    d.loadDatabase(function (err) {
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
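
The arithmetic in removeDocs deserves a worked example: each loop iteration performs one remove plus one re-insert (to keep the collection at size n), so the measured rate mixes the two and the insert time has to be subtracted back out. A sketch with made-up numbers (not part of the commit):

// Hypothetical figures: the loop measured 5000 combined (remove + re-insert) ops/s,
// and the earlier insert phase measured 15000 inserts/s.
var opsPerSecond = 5000, insertOpsPerSecond = 15000;

// Per-operation times: a combined op takes 1/5000 s and an insert 1/15000 s,
// so a remove alone takes 1/5000 - 1/15000 = 2/15000 s, i.e. 7500 removes/s.
var removeOpsPerSecond = Math.floor(1 / ((1 / opsPerSecond) - (1 / insertOpsPerSecond)));
console.log(removeOpsPerSecond);   // 7500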

resources/app/node_modules/nedb/benchmarks/ensureIndex.js (51 lines; generated, vendored, executable file)
@@ -0,0 +1,51 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/insert.bench.db'
  , async = require('async')
  , commonUtilities = require('./commonUtilities')
  , execTime = require('exec-time')
  , profiler = new execTime('INSERT BENCH')
  , d = new Datastore(benchDb)
  , program = require('commander')
  , n
  ;

program
  .option('-n --number [number]', 'Size of the collection to test on', parseInt)
  .option('-i --with-index', 'Test with an index')
  .parse(process.argv);

n = program.number || 10000;

console.log("----------------------------");
console.log("Test with " + n + " documents");
console.log("----------------------------");

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, function (cb) {
    var i;

    profiler.step('Begin calling ensureIndex ' + n + ' times');

    for (i = 0; i < n; i += 1) {
      d.ensureIndex({ fieldName: 'docNumber' });
      // Drop the index right away so the next ensureIndex call rebuilds it from scratch
      delete d.indexes.docNumber;
    }

    console.log("Average time for one ensureIndex: " + (profiler.elapsedSinceLastStep() / n) + "ms");
    profiler.step('Finished calling ensureIndex ' + n + ' times');
    return cb();   // Signal completion so the waterfall reaches its final callback
  }
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/find.js (30 lines; generated, vendored, executable file)
@@ -0,0 +1,30 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/find.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('FIND BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, async.apply(commonUtilities.findDocs, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/findOne.js (31 lines; generated, vendored, executable file)
@@ -0,0 +1,31 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/findOne.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('FINDONE BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, function (cb) { setTimeout(function () { cb(); }, 500); }
, async.apply(commonUtilities.findOneDocs, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/findWithIn.js (30 lines; generated, vendored, executable file)
@@ -0,0 +1,30 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/find.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('FIND BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, async.apply(commonUtilities.findDocsWithIn, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/insert.js (33 lines; generated, vendored, executable file)
@@ -0,0 +1,33 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/insert.bench.db'
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('INSERT BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) {
        d.ensureIndex({ fieldName: 'docNumber' });
        n = 2 * n;   // We will actually insert twice as many documents
                     // because the index is slower when the collection is already
                     // big. So the result given by the algorithm will be a bit worse than
                     // actual performance
      }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/loadDatabase.js (38 lines; generated, vendored, executable file)
@@ -0,0 +1,38 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/loaddb.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , commonUtilities = require('./commonUtilities')
  , execTime = require('exec-time')
  , profiler = new execTime('LOADDB BENCH')
  , d = new Datastore(benchDb)
  , program = require('commander')
  , n
  ;

program
  .option('-n --number [number]', 'Size of the collection to test on', parseInt)
  .option('-i --with-index', 'Test with an index')
  .parse(process.argv);

n = program.number || 10000;

console.log("----------------------------");
console.log("Test with " + n + " documents");
console.log(program.withIndex ? "Use an index" : "Don't use an index");
console.log("----------------------------");

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(cb);
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, async.apply(commonUtilities.loadDatabase, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/remove.js (38 lines; generated, vendored, executable file)
@@ -0,0 +1,38 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/remove.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('REMOVE BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)

  // Test removing only one document at a time
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); }
, async.apply(commonUtilities.removeDocs, { multi: false }, d, n, profiler)
  // Test removing multiple documents
, function (cb) { d.remove({}, { multi: true }, function () { return cb(); }); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); }
, async.apply(commonUtilities.removeDocs, { multi: true }, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/benchmarks/update.js (39 lines; generated, vendored, executable file)
@@ -0,0 +1,39 @@
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/update.bench.db'
  , fs = require('fs')
  , path = require('path')
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('UPDATE BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)

  // Test updating only one document at a time
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); }
, async.apply(commonUtilities.updateDocs, { multi: false }, d, n, profiler)

  // Test updating multiple documents
, function (cb) { d.remove({}, { multi: true }, function (err) { return cb(); }); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); }
, async.apply(commonUtilities.updateDocs, { multi: true }, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});

resources/app/node_modules/nedb/bower.json (6 lines; generated, vendored, executable file)
@@ -0,0 +1,6 @@
{
  "name": "nedb",
  "description": "The Javascript Database for Node, nwjs, Electron and the browser",
  "ignore": ["benchmarks", "lib", "test", "test_lac"],
  "main": ["browser-version/nedb.js", "browser-version/nedb.min.js"]
}

resources/app/node_modules/nedb/browser-version/browser-specific/lib/customUtils.js (78 lines; generated, vendored, executable file)
@@ -0,0 +1,78 @@
/**
 * Specific customUtils for the browser, where we don't have access to the Crypto and Buffer modules
 */

/**
 * Taken from the crypto-browserify module
 * https://github.com/dominictarr/crypto-browserify
 * NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
 */
function randomBytes (size) {
  var bytes = new Array(size);
  var r;

  for (var i = 0; i < size; i++) {
    if ((i & 0x03) == 0) r = Math.random() * 0x100000000;
    bytes[i] = r >>> ((i & 0x03) << 3) & 0xff;
  }

  return bytes;
}


/**
 * Taken from the base64-js module
 * https://github.com/beatgammit/base64-js/
 */
function byteArrayToBase64 (uint8) {
  var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
    , extraBytes = uint8.length % 3   // if we have 1 byte left, pad 2 bytes
    , output = ""
    , temp, length, i;

  function tripletToBase64 (num) {
    return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F];
  }

  // go through the array every three bytes, we'll deal with trailing stuff later
  for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
    temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]);
    output += tripletToBase64(temp);
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  switch (extraBytes) {
    case 1:
      temp = uint8[uint8.length - 1];
      output += lookup[temp >> 2];
      output += lookup[(temp << 4) & 0x3F];
      output += '==';
      break;
    case 2:
      temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1]);
      output += lookup[temp >> 10];
      output += lookup[(temp >> 4) & 0x3F];
      output += lookup[(temp << 2) & 0x3F];
      output += '=';
      break;
  }

  return output;
}


/**
 * Return a random alphanumerical string of length len
 * There is a very small probability (less than 1/1,000,000) for the length to be less than len
 * (if the base64 conversion yields too many pluses and slashes) but
 * that's not an issue here
 * The probability of a collision is extremely small (you would need 3*10^12 documents to have one chance in a million of a collision)
 * See http://en.wikipedia.org/wiki/Birthday_problem
 */
function uid (len) {
  return byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+\/]/g, '').slice(0, len);
}


module.exports.uid = uid;
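
Because randomBytes and byteArrayToBase64 are hand-rolled stand-ins for Node's crypto and Buffer modules, one quick sanity check is to compare them against the browser's native encoder. A sketch (assumes a browser console where btoa is available; not part of the commit):

var bytes = randomBytes(12);
// Standard base64 of the same bytes, via the native encoder
var native = btoa(String.fromCharCode.apply(null, bytes));
console.log(byteArrayToBase64(bytes) === native);   // true

var id = uid(16);
// 32 random bytes yield ~43 base64 characters, so after stripping '+' and '/'
// there are almost always at least 16 characters left
console.log(id.length === 16, /^[A-Za-z0-9]*$/.test(id));   // true (with overwhelming probability), true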

resources/app/node_modules/nedb/browser-version/browser-specific/lib/storage.js (95 lines; generated, vendored, executable file)
@@ -0,0 +1,95 @@
/**
 * Way data is stored for this database
 * For a Node.js/Node Webkit database it's the file system
 * For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
 *
 * This version is the browser version
 */

var localforage = require('localforage')

// Configure localforage to display NeDB name for now. Would be a good idea to let the user use their own app name
localforage.config({
  name: 'NeDB'
, storeName: 'nedbdata'
});


function exists (filename, callback) {
  localforage.getItem(filename, function (err, value) {
    if (value !== null) {   // Even if value is undefined, localforage returns null
      return callback(true);
    } else {
      return callback(false);
    }
  });
}


function rename (filename, newFilename, callback) {
  localforage.getItem(filename, function (err, value) {
    if (value === null) {
      localforage.removeItem(newFilename, function () { return callback(); });
    } else {
      localforage.setItem(newFilename, value, function () {
        localforage.removeItem(filename, function () { return callback(); });
      });
    }
  });
}


function writeFile (filename, contents, options, callback) {
  // Options do not matter in browser setup
  if (typeof options === 'function') { callback = options; }
  localforage.setItem(filename, contents, function () { return callback(); });
}


function appendFile (filename, toAppend, options, callback) {
  // Options do not matter in browser setup
  if (typeof options === 'function') { callback = options; }

  localforage.getItem(filename, function (err, contents) {
    contents = contents || '';
    contents += toAppend;
    localforage.setItem(filename, contents, function () { return callback(); });
  });
}


function readFile (filename, options, callback) {
  // Options do not matter in browser setup
  if (typeof options === 'function') { callback = options; }
  localforage.getItem(filename, function (err, contents) { return callback(null, contents || ''); });
}


function unlink (filename, callback) {
  localforage.removeItem(filename, function () { return callback(); });
}


// Nothing to do, no directories will be used on the browser
function mkdirp (dir, callback) {
  return callback();
}


// Nothing to do, no data corruption possible in the browser
function ensureDatafileIntegrity (filename, callback) {
  return callback(null);
}


// Interface
module.exports.exists = exists;
module.exports.rename = rename;
module.exports.writeFile = writeFile;
module.exports.crashSafeWriteFile = writeFile;   // No need for a crash safe function in the browser
module.exports.appendFile = appendFile;
module.exports.readFile = readFile;
module.exports.unlink = unlink;
module.exports.mkdirp = mkdirp;
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity;
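
These functions mirror the small slice of the fs API that NeDB's persistence layer relies on, with localforage keys standing in for file paths. A minimal round-trip sketch (assumes the module is bundled for the browser so that require resolves; not part of the commit):

var storage = require('./storage');

storage.writeFile('demo.db', '{"hello":"world"}\n', function () {
  storage.appendFile('demo.db', '{"hello":"again"}\n', function () {
    storage.readFile('demo.db', function (err, contents) {
      console.log(contents);   // both JSON lines, concatenated in insertion order
      storage.unlink('demo.db', function () {});   // removes the localforage key
    });
  });
});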

resources/app/node_modules/nedb/browser-version/build.js (101 lines; generated, vendored, executable file)
@@ -0,0 +1,101 @@
/**
 * Build the browser version of nedb
 */

var fs = require('fs')
  , path = require('path')
  , child_process = require('child_process')
  , toCopy = ['lib', 'node_modules']
  , async, browserify, uglify
  ;

// Ensuring both node_modules (the source one and the build one), src and out directories exist
function ensureDirExists (name) {
  try {
    fs.mkdirSync(path.join(__dirname, name));
  } catch (e) {
    if (e.code !== 'EEXIST') {
      console.log("Error ensuring that node_modules exists");
      process.exit(1);
    }
  }
}
ensureDirExists('../node_modules');
ensureDirExists('node_modules');
ensureDirExists('out');
ensureDirExists('src');


// Installing build dependencies and requiring them
console.log("Installing build dependencies");
child_process.exec('npm install', { cwd: __dirname }, function (err, stdout, stderr) {
  if (err) { console.log("Error reinstalling dependencies"); process.exit(1); }

  fs = require('fs-extra');
  async = require('async');
  browserify = require('browserify');
  uglify = require('uglify-js');

  async.waterfall([
    function (cb) {
      console.log("Installing source dependencies if needed");

      child_process.exec('npm install', { cwd: path.join(__dirname, '..') }, function (err) { return cb(err); });
    }
  , function (cb) {
      console.log("Removing contents of the src directory");

      async.eachSeries(fs.readdirSync(path.join(__dirname, 'src')), function (item, _cb) {
        fs.remove(path.join(__dirname, 'src', item), _cb);
      }, cb);
    }
  , function (cb) {
      console.log("Copying source files");

      async.eachSeries(toCopy, function (item, _cb) {
        fs.copy(path.join(__dirname, '..', item), path.join(__dirname, 'src', item), _cb);
      }, cb);
    }
  , function (cb) {
      console.log("Copying browser specific files to replace their server-specific counterparts");

      async.eachSeries(fs.readdirSync(path.join(__dirname, 'browser-specific')), function (item, _cb) {
        fs.copy(path.join(__dirname, 'browser-specific', item), path.join(__dirname, 'src', item), _cb);
      }, cb);
    }
  , function (cb) {
      console.log("Browserifying the code");

      var b = browserify()
        , srcPath = path.join(__dirname, 'src/lib/datastore.js');

      b.add(srcPath);
      b.bundle({ standalone: 'Nedb' }, function (err, out) {
        if (err) { return cb(err); }
        fs.writeFile(path.join(__dirname, 'out/nedb.js'), out, 'utf8', function (err) {
          if (err) {
            return cb(err);
          } else {
            return cb(null, out);
          }
        });
      });
    }
  , function (out, cb) {
      console.log("Creating the minified version");

      var compressedCode = uglify.minify(out, { fromString: true });
      fs.writeFile(path.join(__dirname, 'out/nedb.min.js'), compressedCode.code, 'utf8', cb);
    }
  ], function (err) {
    if (err) {
      console.log("Error during build");
      console.log(err);
    } else {
      console.log("Build finished with success");
    }
  });
});
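
Running node browser-version/build.js regenerates the two bundles that follow (out/nedb.js and out/nedb.min.js); their diffs are suppressed below because of their size.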

resources/app/node_modules/nedb/browser-version/out/nedb.js (9278 lines; generated, vendored, executable file)
File diff suppressed because it is too large

resources/app/node_modules/nedb/browser-version/out/nedb.min.js (4 lines; generated, vendored, executable file)
File diff suppressed because one or more lines are too long

resources/app/node_modules/nedb/browser-version/package.json (8 lines; generated, vendored, executable file)
@@ -0,0 +1,8 @@
{
  "dependencies": {
    "async": "~0.2.9",
    "fs-extra": "~0.6.3",
    "uglify-js": "~2.3.6",
    "browserify": "~2.25.0"
  }
}
||||
2
resources/app/node_modules/nedb/browser-version/test/async.js
generated
vendored
Executable file
2
resources/app/node_modules/nedb/browser-version/test/async.js
generated
vendored
Executable file
File diff suppressed because one or more lines are too long

resources/app/node_modules/nedb/browser-version/test/chai.js (5332 lines; generated, vendored, executable file)
File diff suppressed because it is too large

resources/app/node_modules/nedb/browser-version/test/index.html (24 lines; generated, vendored, executable file)
@@ -0,0 +1,24 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Mocha tests for NeDB</title>
    <link rel="stylesheet" href="mocha.css">
  </head>
  <body>
    <div id="mocha"></div>
    <script src="jquery.min.js"></script>
    <script src="chai.js"></script>
    <script src="underscore.min.js"></script>
    <script src="mocha.js"></script>
    <script>mocha.setup('bdd')</script>
    <script src="../out/nedb.min.js"></script>
    <script src="localforage.js"></script>
    <script src="nedb-browser.js"></script>
    <script>
      mocha.checkLeaks();
      mocha.globals(['jQuery']);
      mocha.run();
    </script>
  </body>
</html>

resources/app/node_modules/nedb/browser-version/test/jquery.min.js (4 lines; generated, vendored, executable file)
File diff suppressed because one or more lines are too long

resources/app/node_modules/nedb/browser-version/test/localforage.js (2758 lines; generated, vendored, executable file)
File diff suppressed because it is too large

resources/app/node_modules/nedb/browser-version/test/mocha.css (199 lines; generated, vendored, executable file)
@@ -0,0 +1,199 @@
@charset "UTF-8";
body {
  font: 20px/1.5 "Helvetica Neue", Helvetica, Arial, sans-serif;
  padding: 60px 50px;
}

#mocha ul, #mocha li {
  margin: 0;
  padding: 0;
}

#mocha ul {
  list-style: none;
}

#mocha h1, #mocha h2 {
  margin: 0;
}

#mocha h1 {
  margin-top: 15px;
  font-size: 1em;
  font-weight: 200;
}

#mocha h1 a {
  text-decoration: none;
  color: inherit;
}

#mocha h1 a:hover {
  text-decoration: underline;
}

#mocha .suite .suite h1 {
  margin-top: 0;
  font-size: .8em;
}

#mocha h2 {
  font-size: 12px;
  font-weight: normal;
  cursor: pointer;
}

#mocha .suite {
  margin-left: 15px;
}

#mocha .test {
  margin-left: 15px;
}

#mocha .test:hover h2::after {
  position: relative;
  top: 0;
  right: -10px;
  content: '(view source)';
  font-size: 12px;
  font-family: arial;
  color: #888;
}

#mocha .test.pending:hover h2::after {
  content: '(pending)';
  font-family: arial;
}

#mocha .test.pass.medium .duration {
  background: #C09853;
}

#mocha .test.pass.slow .duration {
  background: #B94A48;
}

#mocha .test.pass::before {
  content: '✓';
  font-size: 12px;
  display: block;
  float: left;
  margin-right: 5px;
  color: #00d6b2;
}

#mocha .test.pass .duration {
  font-size: 9px;
  margin-left: 5px;
  padding: 2px 5px;
  color: white;
  -webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
  -moz-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
  box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
  -webkit-border-radius: 5px;
  -moz-border-radius: 5px;
  -ms-border-radius: 5px;
  -o-border-radius: 5px;
  border-radius: 5px;
}

#mocha .test.pass.fast .duration {
  display: none;
}

#mocha .test.pending {
  color: #0b97c4;
}

#mocha .test.pending::before {
  content: '◦';
  color: #0b97c4;
}

#mocha .test.fail {
  color: #c00;
}

#mocha .test.fail pre {
  color: black;
}

#mocha .test.fail::before {
  content: '✖';
  font-size: 12px;
  display: block;
  float: left;
  margin-right: 5px;
  color: #c00;
}

#mocha .test pre.error {
  color: #c00;
}

#mocha .test pre {
  display: inline-block;
  font: 12px/1.5 monaco, monospace;
  margin: 5px;
  padding: 15px;
  border: 1px solid #eee;
  border-bottom-color: #ddd;
  -webkit-border-radius: 3px;
  -webkit-box-shadow: 0 1px 3px #eee;
}

#report.pass .test.fail {
  display: none;
}

#report.fail .test.pass {
  display: none;
}

#error {
  color: #c00;
  font-size: 1.5em;
  font-weight: 100;
  letter-spacing: 1px;
}

#stats {
  position: fixed;
  top: 15px;
  right: 10px;
  font-size: 12px;
  margin: 0;
  color: #888;
}

#stats .progress {
  float: right;
  padding-top: 0;
}

#stats em {
  color: black;
}

#stats a {
  text-decoration: none;
  color: inherit;
}

#stats a:hover {
  border-bottom: 1px solid #eee;
}

#stats li {
  display: inline-block;
  margin: 0 5px;
  list-style: none;
  padding-top: 11px;
}

code .comment { color: #ddd }
code .init { color: #2F6FAD }
code .string { color: #5890AD }
code .keyword { color: #8A6343 }
code .number { color: #2F6FAD }

resources/app/node_modules/nedb/browser-version/test/mocha.js (4859 lines; generated, vendored, executable file)
File diff suppressed because it is too large

resources/app/node_modules/nedb/browser-version/test/nedb-browser.js (306 lines; generated, vendored, executable file)
@@ -0,0 +1,306 @@
/**
 * Testing the browser version of NeDB
 * The goal of these tests is not to be exhaustive, we have the server-side NeDB tests for that
 * This is more of a sanity check which executes most of the code at least once and checks
 * it behaves as the server version does
 */

var assert = chai.assert;

/**
 * Given a docs array and an id, return the document whose id matches, or null if none is found
 */
function findById (docs, id) {
  return _.find(docs, function (doc) { return doc._id === id; }) || null;
}


describe('Basic CRUD functionality', function () {

  it('Able to create a database object in the browser', function () {
    var db = new Nedb();

    assert.equal(db.inMemoryOnly, true);
    assert.equal(db.persistence.inMemoryOnly, true);
  });

  it('Insertion and querying', function (done) {
    var db = new Nedb();

    db.insert({ a: 4 }, function (err, newDoc1) {
      assert.isNull(err);
      db.insert({ a: 40 }, function (err, newDoc2) {
        assert.isNull(err);
        db.insert({ a: 400 }, function (err, newDoc3) {
          assert.isNull(err);

          db.find({ a: { $gt: 36 } }, function (err, docs) {
            var doc2 = _.find(docs, function (doc) { return doc._id === newDoc2._id; })
              , doc3 = _.find(docs, function (doc) { return doc._id === newDoc3._id; })
              ;

            assert.isNull(err);
            assert.equal(docs.length, 2);
            assert.equal(doc2.a, 40);
            assert.equal(doc3.a, 400);

            db.find({ a: { $lt: 36 } }, function (err, docs) {
              assert.isNull(err);
              assert.equal(docs.length, 1);
              assert.equal(docs[0].a, 4);
              done();
            });
          });
        });
      });
    });
  });

  it('Querying with regular expressions', function (done) {
    var db = new Nedb();

    db.insert({ planet: 'Earth' }, function (err, newDoc1) {
      assert.isNull(err);
      db.insert({ planet: 'Mars' }, function (err, newDoc2) {
        assert.isNull(err);
        db.insert({ planet: 'Jupiter' }, function (err, newDoc3) {
          assert.isNull(err);
          db.insert({ planet: 'Eaaaaaarth' }, function (err, newDoc4) {
            assert.isNull(err);
            db.insert({ planet: 'Maaaars' }, function (err, newDoc5) {
              assert.isNull(err);

              db.find({ planet: /ar/ }, function (err, docs) {
                assert.isNull(err);
                assert.equal(docs.length, 4);
                assert.equal(_.find(docs, function (doc) { return doc._id === newDoc1._id; }).planet, 'Earth');
                assert.equal(_.find(docs, function (doc) { return doc._id === newDoc2._id; }).planet, 'Mars');
                assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
                assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');

                db.find({ planet: /aa+r/ }, function (err, docs) {
                  assert.isNull(err);
                  assert.equal(docs.length, 2);
                  assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
                  assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');

                  done();
                });
              });
            });
          });
        });
      });
    });
  });

  it('Updating documents', function (done) {
    var db = new Nedb();

    db.insert({ planet: 'Eaaaaarth' }, function (err, newDoc1) {
      db.insert({ planet: 'Maaaaars' }, function (err, newDoc2) {
        // Simple update
        db.update({ _id: newDoc2._id }, { $set: { planet: 'Saturn' } }, {}, function (err, nr) {
          assert.isNull(err);
          assert.equal(nr, 1);

          db.find({}, function (err, docs) {
            assert.equal(docs.length, 2);
            assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
            assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');

            // Failing update
            db.update({ _id: 'unknown' }, { $inc: { count: 1 } }, {}, function (err, nr) {
              assert.isNull(err);
              assert.equal(nr, 0);

              db.find({}, function (err, docs) {
                assert.equal(docs.length, 2);
                assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
                assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');

                // Document replacement
                db.update({ planet: 'Eaaaaarth' }, { planet: 'Uranus' }, { multi: false }, function (err, nr) {
                  assert.isNull(err);
                  assert.equal(nr, 1);

                  db.find({}, function (err, docs) {
                    assert.equal(docs.length, 2);
                    assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
                    assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');

                    // Multi update
                    db.update({}, { $inc: { count: 3 } }, { multi: true }, function (err, nr) {
                      assert.isNull(err);
                      assert.equal(nr, 2);

                      db.find({}, function (err, docs) {
                        assert.equal(docs.length, 2);
                        assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
                        assert.equal(findById(docs, newDoc1._id).count, 3);
                        assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
                        assert.equal(findById(docs, newDoc2._id).count, 3);

                        done();
                      });
                    });
                  });
                });
              });
            });
          });
        });
      });
    });
  });

  it('Updating documents: special modifiers', function (done) {
    var db = new Nedb();

    db.insert({ planet: 'Earth' }, function (err, newDoc1) {
      // Pushing to an array
      db.update({}, { $push: { satellites: 'Phobos' } }, {}, function (err, nr) {
        assert.isNull(err);
        assert.equal(nr, 1);

        db.findOne({}, function (err, doc) {
          assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos'] });

          db.update({}, { $push: { satellites: 'Deimos' } }, {}, function (err, nr) {
            assert.isNull(err);
            assert.equal(nr, 1);

            db.findOne({}, function (err, doc) {
              assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos', 'Deimos'] });

              done();
            });
          });
        });
      });
    });
  });

  it('Upserts', function (done) {
    var db = new Nedb();

    db.update({ a: 4 }, { $inc: { b: 1 } }, { upsert: true }, function (err, nr, upsert) {
      assert.isNull(err);
      // Return upserted document
      assert.equal(upsert.a, 4);
      assert.equal(upsert.b, 1);
      assert.equal(nr, 1);

      db.find({}, function (err, docs) {
        assert.equal(docs.length, 1);
        assert.equal(docs[0].a, 4);
        assert.equal(docs[0].b, 1);

        done();
      });
    });
  });

  it('Removing documents', function (done) {
    var db = new Nedb();

    db.insert({ a: 2 });
    db.insert({ a: 5 });
    db.insert({ a: 7 });

    // Multi remove
    db.remove({ a: { $in: [ 5, 7 ] } }, { multi: true }, function (err, nr) {
      assert.isNull(err);
      assert.equal(nr, 2);

      db.find({}, function (err, docs) {
        assert.equal(docs.length, 1);
        assert.equal(docs[0].a, 2);

        // Remove with no match
        db.remove({ b: { $exists: true } }, { multi: true }, function (err, nr) {
          assert.isNull(err);
          assert.equal(nr, 0);

          db.find({}, function (err, docs) {
            assert.equal(docs.length, 1);
            assert.equal(docs[0].a, 2);

            // Simple remove
            db.remove({ a: { $exists: true } }, { multi: true }, function (err, nr) {
              assert.isNull(err);
              assert.equal(nr, 1);

              db.find({}, function (err, docs) {
                assert.equal(docs.length, 0);

                done();
              });
            });
          });
        });
      });
    });
  });

}); // ==== End of 'Basic CRUD functionality' ==== //


describe('Indexing', function () {

  it('getCandidates works as expected', function (done) {
    var db = new Nedb();

    db.insert({ a: 4 }, function () {
      db.insert({ a: 6 }, function () {
        db.insert({ a: 7 }, function () {
          var candidates = db.getCandidates({ a: 6 });
          assert.equal(candidates.length, 3);
          assert.isDefined(_.find(candidates, function (doc) { return doc.a === 4; }));
          assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));
          assert.isDefined(_.find(candidates, function (doc) { return doc.a === 7; }));

          db.ensureIndex({ fieldName: 'a' });

          candidates = db.getCandidates({ a: 6 });
          assert.equal(candidates.length, 1);
          assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));

          done();
        });
      });
    });
  });

  it('Can use indexes to enforce a unique constraint', function (done) {
    var db = new Nedb();

    db.ensureIndex({ fieldName: 'u', unique: true });

    db.insert({ u : 5 }, function (err) {
      assert.isNull(err);

      db.insert({ u : 98 }, function (err) {
        assert.isNull(err);

        db.insert({ u : 5 }, function (err) {
          assert.equal(err.errorType, 'uniqueViolated');

          done();
        });
      });
    });
  });

}); // ==== End of 'Indexing' ==== //


describe("Don't forget to launch persistence tests!", function () {

  it("See file testPersistence.html", function (done) {
    done();
  });

}); // ===== End of 'persistent in-browser database' =====

resources/app/node_modules/nedb/browser-version/test/playground.html (11 lines; generated, vendored, executable file)
@@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Playground for NeDB</title>
  </head>
  <body>
    <script src="../out/nedb.min.js"></script>
  </body>
</html>

resources/app/node_modules/nedb/browser-version/test/testLoad.html (16 lines; generated, vendored, executable file)
@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Test NeDB persistence load in the browser</title>
    <link rel="stylesheet" href="mocha.css">
  </head>
  <body>
    <div id="results"></div>
    <script src="./localforage.js"></script>
    <script src="./async.js"></script>
    <script src="../out/nedb.js"></script>
    <script src="./testLoad.js"></script>
  </body>
</html>

resources/app/node_modules/nedb/browser-version/test/testLoad.js (111 lines; generated, vendored, executable file)
@@ -0,0 +1,111 @@
console.log('BEGINNING');

var N = 50000
  , db = new Nedb({ filename: 'loadTest', autoload: true })
  , t, i
  , sample = JSON.stringify({ data: Math.random(), _id: Math.random() })
  ;

// Some inserts in sequence, using the default storage mechanism (IndexedDB in my case)
function someInserts (sn, N, callback) {
  var i = 0, beg = Date.now();
  async.whilst( function () { return i < N; }
  , function (_cb) {
      db.insert({ data: Math.random() }, function (err) { i += 1; return _cb(err); });
    }
  , function (err) {
      console.log("Inserts, series " + sn + " " + (Date.now() - beg));
      return callback(err);
    });
}

// Manually updating the localStorage on the same variable
function someLS (sn, N, callback) {
  var i = 0, beg = Date.now();
  for (i = 0; i < N; i += 1) {
    localStorage.setItem('loadTestLS', localStorage.getItem('loadTestLS') + sample);
  }
  console.log("localStorage, series " + sn + " " + (Date.now() - beg));
  return callback();
}

// Manually updating the localStorage on different variables
function someLSDiff (sn, N, callback) {
  var i = 0, beg = Date.now();
  for (i = 0; i < N; i += 1) {
    localStorage.setItem('loadTestLS-' + i, sample);
  }
  console.log("localStorage, series " + sn + " " + (Date.now() - beg));
  return callback();
}

// Manually updating the localforage default on the same variable (IndexedDB on my machine)
function someLF (sn, N, callback) {
  var i = 0, beg = Date.now();
  async.whilst( function () { return i < N; }
  , function (_cb) {
      localforage.getItem('loadTestLF', function (err, value) {
        if (err) { return _cb(err); }
        localforage.setItem('loadTestLF', value + sample, function (err) { i += 1; return _cb(err); });
      });
    }
  , function (err) {
      console.log("localForage/IDB, series " + sn + " " + (Date.now() - beg));
      return callback(err);
    });
}

// Manually updating the localforage default on different variables (IndexedDB on my machine)
function someLFDiff (sn, N, callback) {
  var i = 0, beg = Date.now();
  async.whilst( function () { return i < N; }
  , function (_cb) {
      localforage.setItem('loadTestLF-' + i, sample, function (err) { i += 1; return _cb(err); });
    }
  , function (err) {
      console.log("localForage/IDB, series " + sn + " " + (Date.now() - beg));
      return callback(err);
    });
}


localStorage.setItem('loadTestLS', '');
async.waterfall([
  function (cb) { db.remove({}, { multi: true }, function (err) { return cb(err); }); }

  // Slow and gets slower with database size
  //, async.apply(someInserts, "#1", N)   // N=5000, 141s
  //, async.apply(someInserts, "#2", N)   // N=5000, 208s
  //, async.apply(someInserts, "#3", N)   // N=5000, 281s
  //, async.apply(someInserts, "#4", N)   // N=5000, 350s

  // Slow and gets slower really fast with database size, then outright crashes
  //, async.apply(someLS, "#1", N)   // N=4000, 2.5s
  //, async.apply(someLS, "#2", N)   // N=4000, 8.0s
  //, async.apply(someLS, "#3", N)   // N=4000, 26.5s
  //, async.apply(someLS, "#4", N)   // N=4000, 47.8s then crash, can't get string (with N=5000 crash happens on second pass)

  // Much faster and more consistent
  //, async.apply(someLSDiff, "#1", N)   // N=50000, 0.7s
  //, async.apply(someLSDiff, "#2", N)   // N=50000, 0.5s
  //, async.apply(someLSDiff, "#3", N)   // N=50000, 0.5s
  //, async.apply(someLSDiff, "#4", N)   // N=50000, 0.5s

  // Slow and gets slower with database size
  //, function (cb) { localforage.setItem('loadTestLF', '', function (err) { return cb(err) }) }
  //, async.apply(someLF, "#1", N)   // N=5000, 69s
  //, async.apply(someLF, "#2", N)   // N=5000, 108s
  //, async.apply(someLF, "#3", N)   // N=5000, 137s
  //, async.apply(someLF, "#4", N)   // N=5000, 169s

  // Quite fast and speed doesn't change with database size (tested with N=10000 and N=50000, still no slow-down)
  //, async.apply(someLFDiff, "#1", N)   // N=5000, 18s
  //, async.apply(someLFDiff, "#2", N)   // N=5000, 18s
  //, async.apply(someLFDiff, "#3", N)   // N=5000, 18s
  //, async.apply(someLFDiff, "#4", N)   // N=5000, 18s
]);
13
resources/app/node_modules/nedb/browser-version/test/testPersistence.html
generated
vendored
Executable file
@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Test NeDB persistence in the browser</title>
    <link rel="stylesheet" href="mocha.css">
  </head>
  <body>
    <div id="results"></div>
    <script src="../out/nedb.js"></script>
    <script src="./testPersistence.js"></script>
  </body>
</html>
20
resources/app/node_modules/nedb/browser-version/test/testPersistence.js
generated
vendored
Executable file
@@ -0,0 +1,20 @@
console.log("Beginning tests");
console.log("Please note these tests work on latest Chrome; they might not work in other browsers due to discrepancies in how local storage works for the file:// protocol");

function testsFailed () {
  document.getElementById("results").innerHTML = "TESTS FAILED";
}

var filename = 'test';

var db = new Nedb({ filename: filename, autoload: true });
db.remove({}, { multi: true }, function () {
  db.insert({ hello: 'world' }, function (err) {
    if (err) {
      testsFailed();
      return;
    }

    window.location = './testPersistence2.html';
  });
});
14
resources/app/node_modules/nedb/browser-version/test/testPersistence2.html
generated
vendored
Executable file
@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <title>Test NeDB persistence in the browser - Results</title>
    <link rel="stylesheet" href="mocha.css">
  </head>
  <body>
    <div id="results"></div>
    <script src="jquery.min.js"></script>
    <script src="../out/nedb.js"></script>
    <script src="./testPersistence2.js"></script>
  </body>
</html>
39
resources/app/node_modules/nedb/browser-version/test/testPersistence2.js
generated
vendored
Executable file
@@ -0,0 +1,39 @@
// Capture F5 to reload the base page testPersistence.html, not this one
$(document).on('keydown', function (e) {
  if (e.keyCode === 116) {
    e.preventDefault();
    window.location = 'testPersistence.html';
  }
});


console.log("Checking test results");
console.log("Please note these tests work on latest Chrome; they might not work in other browsers due to discrepancies in how local storage works for the file:// protocol");

function testsFailed () {
  document.getElementById("results").innerHTML = "TESTS FAILED";
}

var filename = 'test';

var db = new Nedb({ filename: filename, autoload: true });
db.find({}, function (err, docs) {
  if (docs.length !== 1) {
    console.log(docs);
    console.log("Unexpected length of document database");
    return testsFailed();
  }

  if (Object.keys(docs[0]).length !== 2) {
    console.log("Unexpected number of fields in the inserted document");
    return testsFailed();
  }

  if (docs[0].hello !== 'world') {
    console.log("Unexpected document");
    return testsFailed();
  }

  document.getElementById("results").innerHTML = "BROWSER PERSISTENCE TEST PASSED";
});
6
resources/app/node_modules/nedb/browser-version/test/underscore.min.js
generated
vendored
Executable file
File diff suppressed because one or more lines are too long
3
resources/app/node_modules/nedb/index.js
generated
vendored
Executable file
@@ -0,0 +1,3 @@
var Datastore = require('./lib/datastore');

module.exports = Datastore;
204
resources/app/node_modules/nedb/lib/cursor.js
generated
vendored
Executable file
@@ -0,0 +1,204 @@
/**
 * Manage access to data, be it to find, update or remove it
 */
var model = require('./model')
  , _ = require('underscore')
  ;


/**
 * Create a new cursor for this collection
 * @param {Datastore} db - The datastore this cursor is bound to
 * @param {Query} query - The query this cursor will operate on
 * @param {Function} execFn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
 */
function Cursor (db, query, execFn) {
  this.db = db;
  this.query = query || {};
  if (execFn) { this.execFn = execFn; }
}


/**
 * Set a limit to the number of results
 */
Cursor.prototype.limit = function(limit) {
  this._limit = limit;
  return this;
};


/**
 * Skip the given number of results
 */
Cursor.prototype.skip = function(skip) {
  this._skip = skip;
  return this;
};


/**
 * Sort results of the query
 * @param {SortQuery} sortQuery - SortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending
 */
Cursor.prototype.sort = function(sortQuery) {
  this._sort = sortQuery;
  return this;
};


/**
 * Add the use of a projection
 * @param {Object} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2
 *                              { key1: 0, key2: 0 } to omit only key1 and key2. Except for _id, you can't mix takes and omits
 */
Cursor.prototype.projection = function(projection) {
  this._projection = projection;
  return this;
};
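
// Usage sketch (illustration only; the datastore and data are hypothetical).
// The four methods above each return `this`, so callers chain them before exec.
function exampleCursorChaining () {
  var Datastore = require('../index');   // deferred require: avoids a circular load
  var db = new Datastore();              // no filename: in-memory store, ready immediately
  db.insert([{ planet: 'Earth' }, { planet: 'Mars' }, { planet: 'Venus' }], function () {
    db.find({}).sort({ planet: 1 }).skip(1).limit(2).exec(function (err, docs) {
      // docs -> [{ planet: 'Mars', ... }, { planet: 'Venus', ... }]
    });
  });
}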

/**
 * Apply the projection
 */
Cursor.prototype.project = function (candidates) {
  var res = [], self = this
    , keepId, action, keys
    ;

  if (this._projection === undefined || Object.keys(this._projection).length === 0) {
    return candidates;
  }

  keepId = this._projection._id === 0 ? false : true;
  this._projection = _.omit(this._projection, '_id');

  // Check for consistency
  keys = Object.keys(this._projection);
  keys.forEach(function (k) {
    if (action !== undefined && self._projection[k] !== action) { throw new Error("Can't both keep and omit fields except for _id"); }
    action = self._projection[k];
  });

  // Do the actual projection
  candidates.forEach(function (candidate) {
    var toPush;
    if (action === 1) {   // pick-type projection
      toPush = { $set: {} };
      keys.forEach(function (k) {
        toPush.$set[k] = model.getDotValue(candidate, k);
        if (toPush.$set[k] === undefined) { delete toPush.$set[k]; }
      });
      toPush = model.modify({}, toPush);
    } else {   // omit-type projection
      toPush = { $unset: {} };
      keys.forEach(function (k) { toPush.$unset[k] = true; });
      toPush = model.modify(candidate, toPush);
    }
    if (keepId) {
      toPush._id = candidate._id;
    } else {
      delete toPush._id;
    }
    res.push(toPush);
  });

  return res;
};
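
// Sketch of the two projection modes (illustration only; values are hypothetical).
function exampleProjection () {
  var picked = new Cursor(null, {});    // the db reference is not used by project()
  picked.projection({ a: 1, b: 1 });
  picked.project([{ a: 1, b: 2, c: 3, _id: 'x' }]);
  // -> [{ a: 1, b: 2, _id: 'x' }]   picked fields, _id kept by default

  var omitted = new Cursor(null, {});
  omitted.projection({ a: 0, _id: 0 });
  omitted.project([{ a: 1, b: 2, c: 3, _id: 'x' }]);
  // -> [{ b: 2, c: 3 }]             omitted fields, _id dropped explicitly
}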

/**
 * Get all matching elements
 * Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne
 * This is an internal function, use exec which uses the executor
 *
 * @param {Function} callback - Signature: err, results
 */
Cursor.prototype._exec = function(_callback) {
  var res = [], added = 0, skipped = 0, self = this
    , error = null
    , i, keys, key
    ;

  function callback (error, res) {
    if (self.execFn) {
      return self.execFn(error, res, _callback);
    } else {
      return _callback(error, res);
    }
  }

  this.db.getCandidates(this.query, function (err, candidates) {
    if (err) { return callback(err); }

    try {
      for (i = 0; i < candidates.length; i += 1) {
        if (model.match(candidates[i], self.query)) {
          // If a sort is defined, wait for the results to be sorted before applying limit and skip
          if (!self._sort) {
            if (self._skip && self._skip > skipped) {
              skipped += 1;
            } else {
              res.push(candidates[i]);
              added += 1;
              if (self._limit && self._limit <= added) { break; }
            }
          } else {
            res.push(candidates[i]);
          }
        }
      }
    } catch (err) {
      return callback(err);
    }

    // Apply all sorts
    if (self._sort) {
      keys = Object.keys(self._sort);

      // Sorting
      var criteria = [];
      for (i = 0; i < keys.length; i++) {
        key = keys[i];
        criteria.push({ key: key, direction: self._sort[key] });
      }
      res.sort(function(a, b) {
        var criterion, compare, i;
        for (i = 0; i < criteria.length; i++) {
          criterion = criteria[i];
          compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), self.db.compareStrings);
          if (compare !== 0) {
            return compare;
          }
        }
        return 0;
      });

      // Applying limit and skip
      var limit = self._limit || res.length
        , skip = self._skip || 0;

      res = res.slice(skip, skip + limit);
    }

    // Apply projection
    try {
      res = self.project(res);
    } catch (e) {
      error = e;
      res = undefined;
    }

    return callback(error, res);
  });
};

Cursor.prototype.exec = function () {
  this.db.executor.push({ this: this, fn: this._exec, arguments: arguments });
};


// Interface
module.exports = Cursor;
22
resources/app/node_modules/nedb/lib/customUtils.js
generated
vendored
Executable file
@@ -0,0 +1,22 @@
var crypto = require('crypto')
  ;

/**
 * Return a random alphanumerical string of length len
 * There is a very small probability (less than 1/1,000,000) for the length to be less than len
 * (if the base64 conversion yields too many pluses and slashes), but
 * that's not an issue here
 * The probability of a collision is extremely small (you'd need 3*10^12 documents to have one chance in a million of a collision)
 * See http://en.wikipedia.org/wiki/Birthday_problem
 */
function uid (len) {
  return crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
    .toString('base64')
    .replace(/[+\/]/g, '')
    .slice(0, len);
}


// Interface
module.exports.uid = uid;

704
resources/app/node_modules/nedb/lib/datastore.js
generated
vendored
Executable file
@@ -0,0 +1,704 @@
var customUtils = require('./customUtils')
  , model = require('./model')
  , async = require('async')
  , Executor = require('./executor')
  , Index = require('./indexes')
  , util = require('util')
  , _ = require('underscore')
  , Persistence = require('./persistence')
  , Cursor = require('./cursor')
  ;


/**
 * Create a new collection
 * @param {String} options.filename Optional, datastore will be in-memory only if not provided
 * @param {Boolean} options.timestampData Optional, defaults to false. If set to true, createdAt and updatedAt will be created and populated automatically (if not specified by user)
 * @param {Boolean} options.inMemoryOnly Optional, defaults to false
 * @param {String} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
 *                                           Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
 * @param {Boolean} options.autoload Optional, defaults to false
 * @param {Function} options.onload Optional, if autoload is used this will be called after the database is loaded, with the error object as parameter. If you don't pass it the error will be thrown
 * @param {Function} options.afterSerialization/options.beforeDeserialization Optional, serialization hooks
 * @param {Number} options.corruptAlertThreshold Optional, threshold after which an alert is thrown if too much data is corrupt
 * @param {Function} options.compareStrings Optional, string comparison function that overrides the default used for sorting
 *
 * Event Emitter - Events
 * * compaction.done - Fired whenever a compaction operation was finished
 */
function Datastore (options) {
  var filename;

  // Retrocompatibility with v0.6 and before
  if (typeof options === 'string') {
    filename = options;
    this.inMemoryOnly = false;   // Default
  } else {
    options = options || {};
    filename = options.filename;
    this.inMemoryOnly = options.inMemoryOnly || false;
    this.autoload = options.autoload || false;
    this.timestampData = options.timestampData || false;
  }

  // Determine whether in memory or persistent
  if (!filename || typeof filename !== 'string' || filename.length === 0) {
    this.filename = null;
    this.inMemoryOnly = true;
  } else {
    this.filename = filename;
  }

  // String comparison function
  this.compareStrings = options.compareStrings;

  // Persistence handling
  this.persistence = new Persistence({ db: this, nodeWebkitAppName: options.nodeWebkitAppName
                                     , afterSerialization: options.afterSerialization
                                     , beforeDeserialization: options.beforeDeserialization
                                     , corruptAlertThreshold: options.corruptAlertThreshold
                                     });

  // This new executor is ready if we don't use persistence
  // If we do, it will only be ready once loadDatabase is called
  this.executor = new Executor();
  if (this.inMemoryOnly) { this.executor.ready = true; }

  // Indexed by field name, dot notation can be used
  // _id is always indexed and since _ids are generated randomly the underlying
  // binary tree is always well-balanced
  this.indexes = {};
  this.indexes._id = new Index({ fieldName: '_id', unique: true });
  this.ttlIndexes = {};

  // Queue a load of the database right away and call the onload handler
  // By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception
  if (this.autoload) { this.loadDatabase(options.onload || function (err) {
    if (err) { throw err; }
  }); }
}

util.inherits(Datastore, require('events').EventEmitter);
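
// Usage sketch (illustration only; the filename and handler are hypothetical):
// a typical instantiation exercising the options documented above.
function exampleDatastoreCreation () {
  return new Datastore({
    filename: 'users.db'       // omit for a pure in-memory store
  , autoload: true             // queue a load of the datafile right away...
  , timestampData: true        // ...maintain createdAt/updatedAt automatically...
  , onload: function (err) {   // ...and report load errors here instead of throwing
      if (err) { console.error('load failed', err); }
    }
  });
}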

/**
 * Load the database from the datafile, and trigger the execution of buffered commands if any
 */
Datastore.prototype.loadDatabase = function () {
  this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true);
};


/**
 * Get an array of all the data in the database
 */
Datastore.prototype.getAllData = function () {
  return this.indexes._id.getAll();
};


/**
 * Reset all currently defined indexes
 */
Datastore.prototype.resetIndexes = function (newData) {
  var self = this;

  Object.keys(this.indexes).forEach(function (i) {
    self.indexes[i].reset(newData);
  });
};


/**
 * Ensure an index is kept for this field. Same parameters as lib/indexes
 * For now this function is synchronous, we need to test how much time it takes
 * We use an async API for consistency with the rest of the code
 * @param {String} options.fieldName
 * @param {Boolean} options.unique
 * @param {Boolean} options.sparse
 * @param {Number} options.expireAfterSeconds - Optional, if set this index becomes a TTL index (only works on Date fields, not arrays of Dates)
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.ensureIndex = function (options, cb) {
  var err
    , callback = cb || function () {};

  options = options || {};

  if (!options.fieldName) {
    err = new Error("Cannot create an index without a fieldName");
    err.missingFieldName = true;
    return callback(err);
  }
  if (this.indexes[options.fieldName]) { return callback(null); }

  this.indexes[options.fieldName] = new Index(options);
  if (options.expireAfterSeconds !== undefined) { this.ttlIndexes[options.fieldName] = options.expireAfterSeconds; }   // With this implementation index creation is not necessary to ensure TTL but we stick with MongoDB's API here

  try {
    this.indexes[options.fieldName].insert(this.getAllData());
  } catch (e) {
    delete this.indexes[options.fieldName];
    return callback(e);
  }

  // We may want to force all options to be persisted including defaults, not just the ones passed to the index creation function
  this.persistence.persistNewState([{ $$indexCreated: options }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};
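
// Usage sketch (illustration only; field names are hypothetical). A unique
// index rejects duplicate values; expireAfterSeconds turns the index into a
// TTL index on a Date field, as documented above.
function exampleEnsureIndex (db) {
  db.ensureIndex({ fieldName: 'email', unique: true }, function (err) {
    // err is non-null if existing documents already violate the constraint
  });
  // Docs whose sessionExpiresAt Date lies more than 3600s in the past are
  // treated as expired and removed when queries touch them
  db.ensureIndex({ fieldName: 'sessionExpiresAt', expireAfterSeconds: 3600 });
}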

/**
 * Remove an index
 * @param {String} fieldName
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.removeIndex = function (fieldName, cb) {
  var callback = cb || function () {};

  delete this.indexes[fieldName];

  this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};


/**
 * Add one or several document(s) to all indexes
 */
Datastore.prototype.addToIndexes = function (doc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].insert(doc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // If an error happened, we need to rollback the insert on all other indexes
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].remove(doc);
    }

    throw error;
  }
};


/**
 * Remove one or several document(s) from all indexes
 */
Datastore.prototype.removeFromIndexes = function (doc) {
  var self = this;

  Object.keys(this.indexes).forEach(function (i) {
    self.indexes[i].remove(doc);
  });
};


/**
 * Update one or several documents in all indexes
 * To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs
 * If one update violates a constraint, all changes are rolled back
 */
Datastore.prototype.updateIndexes = function (oldDoc, newDoc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].update(oldDoc, newDoc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // If an error happened, we need to rollback the update on all other indexes
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].revertUpdate(oldDoc, newDoc);
    }

    throw error;
  }
};

/**
 * Return the list of candidates for a given query
 * Crude implementation for now, we return the candidates given by the first usable index if any
 * We try the following query types, in this order: basic match, $in match, comparison match
 * One way to make it better would be to enable the use of multiple indexes if the first usable index
 * returns too much data. I may do it in the future.
 *
 * Returned candidates will be scanned to find and remove all expired documents
 *
 * @param {Query} query
 * @param {Boolean} dontExpireStaleDocs Optional, defaults to false, if true don't remove stale docs. Useful for the remove function which shouldn't be impacted by expirations
 * @param {Function} callback Signature err, docs
 */
Datastore.prototype.getCandidates = function (query, dontExpireStaleDocs, callback) {
  var indexNames = Object.keys(this.indexes)
    , self = this
    , usableQueryKeys;

  if (typeof dontExpireStaleDocs === 'function') {
    callback = dontExpireStaleDocs;
    dontExpireStaleDocs = false;
  }

  async.waterfall([
  // STEP 1: get candidates list by checking indexes from most to least frequent use case
  function (cb) {
    // For a basic match
    usableQueryKeys = [];
    Object.keys(query).forEach(function (k) {
      if (typeof query[k] === 'string' || typeof query[k] === 'number' || typeof query[k] === 'boolean' || util.isDate(query[k]) || query[k] === null) {
        usableQueryKeys.push(k);
      }
    });
    usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
    if (usableQueryKeys.length > 0) {
      return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]]));
    }

    // For a $in match
    usableQueryKeys = [];
    Object.keys(query).forEach(function (k) {
      if (query[k] && query[k].hasOwnProperty('$in')) {
        usableQueryKeys.push(k);
      }
    });
    usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
    if (usableQueryKeys.length > 0) {
      return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]].$in));
    }

    // For a comparison match
    usableQueryKeys = [];
    Object.keys(query).forEach(function (k) {
      if (query[k] && (query[k].hasOwnProperty('$lt') || query[k].hasOwnProperty('$lte') || query[k].hasOwnProperty('$gt') || query[k].hasOwnProperty('$gte'))) {
        usableQueryKeys.push(k);
      }
    });
    usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
    if (usableQueryKeys.length > 0) {
      return cb(null, self.indexes[usableQueryKeys[0]].getBetweenBounds(query[usableQueryKeys[0]]));
    }

    // By default, return all the DB data
    return cb(null, self.getAllData());
  }
  // STEP 2: remove all expired documents
  , function (docs) {
    if (dontExpireStaleDocs) { return callback(null, docs); }

    var expiredDocsIds = [], validDocs = [], ttlIndexesFieldNames = Object.keys(self.ttlIndexes);

    docs.forEach(function (doc) {
      var valid = true;
      ttlIndexesFieldNames.forEach(function (i) {
        if (doc[i] !== undefined && util.isDate(doc[i]) && Date.now() > doc[i].getTime() + self.ttlIndexes[i] * 1000) {
          valid = false;
        }
      });
      if (valid) { validDocs.push(doc); } else { expiredDocsIds.push(doc._id); }
    });

    async.eachSeries(expiredDocsIds, function (_id, cb) {
      self._remove({ _id: _id }, {}, function (err) {
        if (err) { return callback(err); }
        return cb();
      });
    }, function (err) {
      return callback(null, validDocs);
    });
  }]);
};
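
// Sketch of the three indexable query shapes (illustration only; 'planet' is a
// hypothetical indexed field). Anything else falls back to a full getAllData() scan.
function exampleIndexedQueries (db) {
  db.ensureIndex({ fieldName: 'planet' });
  db.find({ planet: 'Mars' }, function () {});                       // basic match
  db.find({ planet: { $in: ['Earth', 'Mars'] } }, function () {});   // $in match
  db.find({ planet: { $gt: 'E', $lt: 'N' } }, function () {});       // comparison match
}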

/**
 * Insert a new document
 * @param {Function} cb Optional callback, signature: err, insertedDoc
 *
 * @api private Use Datastore.insert which has the same signature
 */
Datastore.prototype._insert = function (newDoc, cb) {
  var callback = cb || function () {}
    , preparedDoc
    ;

  try {
    preparedDoc = this.prepareDocumentForInsertion(newDoc);
    this._insertInCache(preparedDoc);
  } catch (e) {
    return callback(e);
  }

  this.persistence.persistNewState(util.isArray(preparedDoc) ? preparedDoc : [preparedDoc], function (err) {
    if (err) { return callback(err); }
    return callback(null, model.deepCopy(preparedDoc));
  });
};

/**
 * Create a new _id that's not already in use
 */
Datastore.prototype.createNewId = function () {
  var tentativeId = customUtils.uid(16);
  // Try as many times as needed to get an unused _id. As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1)
  if (this.indexes._id.getMatching(tentativeId).length > 0) {
    tentativeId = this.createNewId();
  }
  return tentativeId;
};

/**
 * Prepare a document (or array of documents) to be inserted in a database
 * Meaning: adds _id and timestamps if necessary on a copy of newDoc to avoid any side effect on user input
 * @api private
 */
Datastore.prototype.prepareDocumentForInsertion = function (newDoc) {
  var preparedDoc, self = this;

  if (util.isArray(newDoc)) {
    preparedDoc = [];
    newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)); });
  } else {
    preparedDoc = model.deepCopy(newDoc);
    if (preparedDoc._id === undefined) { preparedDoc._id = this.createNewId(); }
    var now = new Date();
    if (this.timestampData && preparedDoc.createdAt === undefined) { preparedDoc.createdAt = now; }
    if (this.timestampData && preparedDoc.updatedAt === undefined) { preparedDoc.updatedAt = now; }
    model.checkObject(preparedDoc);
  }

  return preparedDoc;
};

/**
 * If newDoc is an array of documents, this will insert all documents in the cache
 * @api private
 */
Datastore.prototype._insertInCache = function (preparedDoc) {
  if (util.isArray(preparedDoc)) {
    this._insertMultipleDocsInCache(preparedDoc);
  } else {
    this.addToIndexes(preparedDoc);
  }
};

/**
 * If one insertion fails (e.g. because of a unique constraint), roll back all previous
 * inserts and throw the error
 * @api private
 */
Datastore.prototype._insertMultipleDocsInCache = function (preparedDocs) {
  var i, failingI, error;

  for (i = 0; i < preparedDocs.length; i += 1) {
    try {
      this.addToIndexes(preparedDocs[i]);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.removeFromIndexes(preparedDocs[i]);
    }

    throw error;
  }
};

Datastore.prototype.insert = function () {
  this.executor.push({ this: this, fn: this._insert, arguments: arguments });
};
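
// Usage sketch (illustration only; documents are hypothetical). Arrays insert
// atomically: on a constraint violation the rollback above undoes everything.
function exampleInsert (db) {
  db.insert({ planet: 'Earth' }, function (err, newDoc) {
    // newDoc is a deep copy carrying _id (and createdAt/updatedAt when timestampData is set)
  });
  db.insert([{ planet: 'Mars' }, { planet: 'Venus' }], function (err, newDocs) {
    // err is set and nothing is inserted if any element violates an index constraint
  });
}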

/**
 * Count all documents matching the query
 * @param {Object} query MongoDB-style query
 */
Datastore.prototype.count = function(query, callback) {
  var cursor = new Cursor(this, query, function(err, docs, callback) {
    if (err) { return callback(err); }
    return callback(null, docs.length);
  });

  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};


/**
 * Find all documents matching the query
 * If no callback is passed, we return the cursor so that the user can limit, skip and finally exec
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 */
Datastore.prototype.find = function (query, projection, callback) {
  switch (arguments.length) {
    case 1:
      projection = {};
      // callback is undefined, will return a cursor
      break;
    case 2:
      if (typeof projection === 'function') {
        callback = projection;
        projection = {};
      }   // If not, assume projection is an object and callback undefined
      break;
  }

  var cursor = new Cursor(this, query, function(err, docs, callback) {
    var res = [], i;

    if (err) { return callback(err); }

    for (i = 0; i < docs.length; i += 1) {
      res.push(model.deepCopy(docs[i]));
    }
    return callback(null, res);
  });

  cursor.projection(projection);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};


/**
 * Find one document matching the query
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 */
Datastore.prototype.findOne = function (query, projection, callback) {
  switch (arguments.length) {
    case 1:
      projection = {};
      // callback is undefined, will return a cursor
      break;
    case 2:
      if (typeof projection === 'function') {
        callback = projection;
        projection = {};
      }   // If not, assume projection is an object and callback undefined
      break;
  }

  var cursor = new Cursor(this, query, function(err, docs, callback) {
    if (err) { return callback(err); }
    if (docs.length === 1) {
      return callback(null, model.deepCopy(docs[0]));
    } else {
      return callback(null, null);
    }
  });

  cursor.projection(projection).limit(1);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
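
// Usage sketch (illustration only; queries and fields are hypothetical).
function exampleFind (db) {
  db.findOne({ planet: 'Earth' }, { planet: 1, _id: 0 }, function (err, doc) {
    // doc -> { planet: 'Earth' }, or null when nothing matches
  });
  db.count({ system: 'solar' }, function (err, n) {
    // n is the number of matching documents
  });
}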

/**
 * Update all docs matching query
 * @param {Object} query
 * @param {Object} updateQuery
 * @param {Object} options Optional options
 *                 options.multi If true, can update multiple documents (defaults to false)
 *                 options.upsert If true, document is inserted if the query doesn't match anything
 *                 options.returnUpdatedDocs Defaults to false, if true return as third argument the array of updated matched documents (even if no change actually took place)
 * @param {Function} cb Optional callback, signature: (err, numAffected, affectedDocuments, upsert)
 *                      If update was an upsert, upsert flag is set to true
 *                      affectedDocuments can be one of the following:
 *                        * For an upsert, the upserted document
 *                        * For an update with returnUpdatedDocs option false, null
 *                        * For an update with returnUpdatedDocs true and multi false, the updated document
 *                        * For an update with returnUpdatedDocs true and multi true, the array of updated documents
 *
 * WARNING: The API was changed between v1.7.4 and v1.8, for consistency and readability reasons. Prior to and including v1.7.4,
 * the callback signature was (err, numAffected, updated) where updated was the updated document in case of an upsert
 * or the array of updated documents for an update if the returnUpdatedDocs option was true. That meant that the type of
 * affectedDocuments in a non-multi update depended on whether there was an upsert or not, leaving only two ways for the
 * user to check whether an upsert had occurred: checking the type of affectedDocuments or running another find query on
 * the whole dataset to check its size. Both options being ugly, the breaking change was necessary.
 *
 * @api private Use Datastore.update which has the same signature
 */
Datastore.prototype._update = function (query, updateQuery, options, cb) {
  var callback
    , self = this
    , numReplaced = 0
    , multi, upsert
    , i
    ;

  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;
  upsert = options.upsert !== undefined ? options.upsert : false;

  async.waterfall([
  function (cb) {   // If upsert option is set, check whether we need to insert the doc
    if (!upsert) { return cb(); }

    // Need to use an internal function not tied to the executor to avoid deadlock
    var cursor = new Cursor(self, query);
    cursor.limit(1)._exec(function (err, docs) {
      if (err) { return callback(err); }
      if (docs.length === 1) {
        return cb();
      } else {
        var toBeInserted;

        try {
          model.checkObject(updateQuery);
          // updateQuery is a simple object with no modifier, use it as the document to insert
          toBeInserted = updateQuery;
        } catch (e) {
          // updateQuery contains modifiers, use the find query as the base,
          // strip it from all operators and update it according to updateQuery
          try {
            toBeInserted = model.modify(model.deepCopy(query, true), updateQuery);
          } catch (err) {
            return callback(err);
          }
        }

        return self._insert(toBeInserted, function (err, newDoc) {
          if (err) { return callback(err); }
          return callback(null, 1, newDoc, true);
        });
      }
    });
  }
  , function () {   // Perform the update
    var modifiedDoc, modifications = [], createdAt;

    self.getCandidates(query, function (err, candidates) {
      if (err) { return callback(err); }

      // Preparing update (if an error is thrown here neither the datafile nor
      // the in-memory indexes are affected)
      try {
        for (i = 0; i < candidates.length; i += 1) {
          if (model.match(candidates[i], query) && (multi || numReplaced === 0)) {
            numReplaced += 1;
            if (self.timestampData) { createdAt = candidates[i].createdAt; }
            modifiedDoc = model.modify(candidates[i], updateQuery);
            if (self.timestampData) {
              modifiedDoc.createdAt = createdAt;
              modifiedDoc.updatedAt = new Date();
            }
            modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc });
          }
        }
      } catch (err) {
        return callback(err);
      }

      // Change the docs in memory
      try {
        self.updateIndexes(modifications);
      } catch (err) {
        return callback(err);
      }

      // Update the datafile
      var updatedDocs = _.pluck(modifications, 'newDoc');
      self.persistence.persistNewState(updatedDocs, function (err) {
        if (err) { return callback(err); }
        if (!options.returnUpdatedDocs) {
          return callback(null, numReplaced);
        } else {
          var updatedDocsDC = [];
          updatedDocs.forEach(function (doc) { updatedDocsDC.push(model.deepCopy(doc)); });
          if (!multi) { updatedDocsDC = updatedDocsDC[0]; }
          return callback(null, numReplaced, updatedDocsDC);
        }
      });
    });
  }]);
};

Datastore.prototype.update = function () {
  this.executor.push({ this: this, fn: this._update, arguments: arguments });
};
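
// Usage sketch (illustration only), matching the callback contract documented above.
function exampleUpdate (db) {
  db.update(
    { planet: 'Mars' }
  , { $set: { inhabited: false } }
  , { upsert: true, returnUpdatedDocs: true }
  , function (err, numAffected, affectedDocuments, upsert) {
      // upsert === true  -> affectedDocuments is the newly inserted document
      // otherwise        -> affectedDocuments is the updated document (multi is false here)
    }
  );
}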

/**
 * Remove all docs matching the query
 * For now very naive implementation (similar to update)
 * @param {Object} query
 * @param {Object} options Optional options
 *                 options.multi If true, can remove multiple documents (defaults to false)
 * @param {Function} cb Optional callback, signature: err, numRemoved
 *
 * @api private Use Datastore.remove which has the same signature
 */
Datastore.prototype._remove = function (query, options, cb) {
  var callback
    , self = this, numRemoved = 0, removedDocs = [], multi
    ;

  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;

  this.getCandidates(query, true, function (err, candidates) {
    if (err) { return callback(err); }

    try {
      candidates.forEach(function (d) {
        if (model.match(d, query) && (multi || numRemoved === 0)) {
          numRemoved += 1;
          removedDocs.push({ $$deleted: true, _id: d._id });
          self.removeFromIndexes(d);
        }
      });
    } catch (err) { return callback(err); }

    self.persistence.persistNewState(removedDocs, function (err) {
      if (err) { return callback(err); }
      return callback(null, numRemoved);
    });
  });
};

Datastore.prototype.remove = function () {
  this.executor.push({ this: this, fn: this._remove, arguments: arguments });
};
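
// Usage sketch (illustration only; the query is hypothetical).
function exampleRemove (db) {
  db.remove({ planet: 'Mars' }, { multi: true }, function (err, numRemoved) {
    // numRemoved counts every match; without multi only the first match is removed
  });
}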


module.exports = Datastore;
78
resources/app/node_modules/nedb/lib/executor.js
generated
vendored
Executable file
@@ -0,0 +1,78 @@
/**
 * Responsible for sequentially executing actions on the database
 */

var async = require('async')
  ;

function Executor () {
  this.buffer = [];
  this.ready = false;

  // This queue will execute all commands, one-by-one in order
  this.queue = async.queue(function (task, cb) {
    var newArguments = [];

    // task.arguments is an array-like object on which adding a new field doesn't work, so we transform it into a real array
    for (var i = 0; i < task.arguments.length; i += 1) { newArguments.push(task.arguments[i]); }
    var lastArg = task.arguments[task.arguments.length - 1];

    // Always tell the queue task is complete. Execute callback if any was given.
    if (typeof lastArg === 'function') {
      // Callback was supplied
      newArguments[newArguments.length - 1] = function () {
        if (typeof setImmediate === 'function') {
          setImmediate(cb);
        } else {
          process.nextTick(cb);
        }
        lastArg.apply(null, arguments);
      };
    } else if (!lastArg && task.arguments.length !== 0) {
      // false/undefined/null supplied as callback
      newArguments[newArguments.length - 1] = function () { cb(); };
    } else {
      // Nothing supplied as callback
      newArguments.push(function () { cb(); });
    }


    task.fn.apply(task.this, newArguments);
  }, 1);
}


/**
 * If executor is ready, queue task (and process it immediately if executor was idle)
 * If not, buffer task for later processing
 * @param {Object} task
 *                 task.this - Object to use as this
 *                 task.fn - Function to execute
 *                 task.arguments - Array of arguments, IMPORTANT: only the last argument may be a function (the callback)
 *                                  and the last argument cannot be false/undefined/null
 * @param {Boolean} forceQueuing Optional (defaults to false) force executor to queue task even if it is not ready
 */
Executor.prototype.push = function (task, forceQueuing) {
  if (this.ready || forceQueuing) {
    this.queue.push(task);
  } else {
    this.buffer.push(task);
  }
};


/**
 * Queue all tasks in buffer (in the same order they came in)
 * Automatically sets executor as ready
 */
Executor.prototype.processBuffer = function () {
  var i;
  this.ready = true;
  for (i = 0; i < this.buffer.length; i += 1) { this.queue.push(this.buffer[i]); }
  this.buffer = [];
};
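
// Sketch of the task shape the executor expects (illustration only; the
// executor is internal and normally driven by Datastore and Cursor).
function exampleExecutorUsage () {
  var executor = new Executor();
  executor.push({
    this: null
  , fn: function (msg, done) { console.log(msg); done(); }
  , arguments: ['hello', function () { /* runs once fn has called done() */ }]
  });                         // buffered: the executor is not ready yet
  executor.processBuffer();   // marks it ready and runs the buffered task
}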


// Interface
module.exports = Executor;
294
resources/app/node_modules/nedb/lib/indexes.js
generated
vendored
Executable file
@@ -0,0 +1,294 @@
var BinarySearchTree = require('binary-search-tree').AVLTree
  , model = require('./model')
  , _ = require('underscore')
  , util = require('util')
  ;

/**
 * Two indexed pointers are equal iff they point to the same place
 */
function checkValueEquality (a, b) {
  return a === b;
}

/**
 * Type-aware projection
 */
function projectForUnique (elt) {
  if (elt === null) { return '$null'; }
  if (typeof elt === 'string') { return '$string' + elt; }
  if (typeof elt === 'boolean') { return '$boolean' + elt; }
  if (typeof elt === 'number') { return '$number' + elt; }
  if (util.isDate(elt)) { return '$date' + elt.getTime(); }

  return elt;   // Arrays and objects, will check for pointer equality
}


/**
 * Create a new index
 * All methods on an index guarantee that either the whole operation was successful and the index changed
 * or the operation was unsuccessful and an error is thrown while the index is unchanged
 * @param {String} options.fieldName On which field should the index apply (can use dot notation to index on sub fields)
 * @param {Boolean} options.unique Optional, enforce a unique constraint (default: false)
 * @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false)
 */
function Index (options) {
  this.fieldName = options.fieldName;
  this.unique = options.unique || false;
  this.sparse = options.sparse || false;

  this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality: checkValueEquality };

  this.reset();   // No data in the beginning
}


/**
 * Reset an index
 * @param {Document or Array of documents} newData Optional, data to initialize the index with
 *                                                 If an error is thrown during insertion, the index is not modified
 */
Index.prototype.reset = function (newData) {
  this.tree = new BinarySearchTree(this.treeOptions);

  if (newData) { this.insert(newData); }
};


/**
 * Insert a new document in the index
 * If an array is passed, we insert all its elements (if one insertion fails the index is not modified)
 * O(log(n))
 */
Index.prototype.insert = function (doc) {
  var key, self = this
    , keys, i, failingI, error
    ;

  if (util.isArray(doc)) { this.insertMultipleDocs(doc); return; }

  key = model.getDotValue(doc, this.fieldName);

  // We don't index documents that don't contain the field if the index is sparse
  if (key === undefined && this.sparse) { return; }

  if (!util.isArray(key)) {
    this.tree.insert(key, doc);
  } else {
    // If an insert fails due to a unique constraint, roll back all inserts before it
    keys = _.uniq(key, projectForUnique);

    for (i = 0; i < keys.length; i += 1) {
      try {
        this.tree.insert(keys[i], doc);
      } catch (e) {
        error = e;
        failingI = i;
        break;
      }
    }

    if (error) {
      for (i = 0; i < failingI; i += 1) {
        this.tree.delete(keys[i], doc);
      }

      throw error;
    }
  }
};

/**
 * Insert an array of documents in the index
 * If a constraint is violated, the changes should be rolled back and an error thrown
 *
 * @API private
 */
Index.prototype.insertMultipleDocs = function (docs) {
  var i, error, failingI;

  for (i = 0; i < docs.length; i += 1) {
    try {
      this.insert(docs[i]);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.remove(docs[i]);
    }

    throw error;
  }
};


/**
 * Remove a document from the index
 * If an array is passed, we remove all its elements
 * The remove operation is safe with regards to the 'unique' constraint
 * O(log(n))
 */
Index.prototype.remove = function (doc) {
  var key, self = this;

  if (util.isArray(doc)) { doc.forEach(function (d) { self.remove(d); }); return; }

  key = model.getDotValue(doc, this.fieldName);

  if (key === undefined && this.sparse) { return; }

  if (!util.isArray(key)) {
    this.tree.delete(key, doc);
  } else {
    _.uniq(key, projectForUnique).forEach(function (_key) {
      self.tree.delete(_key, doc);
    });
  }
};


/**
 * Update a document in the index
 * If a constraint is violated, changes are rolled back and an error thrown
 * Naive implementation, still in O(log(n))
 */
Index.prototype.update = function (oldDoc, newDoc) {
  if (util.isArray(oldDoc)) { this.updateMultipleDocs(oldDoc); return; }

  this.remove(oldDoc);

  try {
    this.insert(newDoc);
  } catch (e) {
    this.insert(oldDoc);
    throw e;
  }
};


/**
 * Update multiple documents in the index
 * If a constraint is violated, the changes need to be rolled back
 * and an error thrown
 * @param {Array of oldDoc, newDoc pairs} pairs
 *
 * @API private
 */
Index.prototype.updateMultipleDocs = function (pairs) {
  var i, failingI, error;

  for (i = 0; i < pairs.length; i += 1) {
    this.remove(pairs[i].oldDoc);
  }

  for (i = 0; i < pairs.length; i += 1) {
    try {
      this.insert(pairs[i].newDoc);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  // If an error was raised, roll back changes in the inverse order
  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.remove(pairs[i].newDoc);
    }

    for (i = 0; i < pairs.length; i += 1) {
      this.insert(pairs[i].oldDoc);
    }

    throw error;
  }
};


/**
 * Revert an update
 */
Index.prototype.revertUpdate = function (oldDoc, newDoc) {
  var revert = [];

  if (!util.isArray(oldDoc)) {
    this.update(newDoc, oldDoc);
  } else {
    oldDoc.forEach(function (pair) {
      revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc });
    });
    this.update(revert);
  }
};


/**
 * Get all documents in index whose key matches value (if it is a Thing) or one of the elements of value (if it is an array of Things)
 * @param {Thing} value Value to match the key against
 * @return {Array of documents}
 */
Index.prototype.getMatching = function (value) {
  var self = this;

  if (!util.isArray(value)) {
    return self.tree.search(value);
  } else {
    var _res = {}, res = [];

    value.forEach(function (v) {
      self.getMatching(v).forEach(function (doc) {
        _res[doc._id] = doc;
      });
    });

    Object.keys(_res).forEach(function (_id) {
      res.push(_res[_id]);
    });

    return res;
  }
};


/**
 * Get all documents in index whose key is between the bounds as they are defined by the query
 * Documents are sorted by key
 * @param {Query} query
 * @return {Array of documents}
 */
Index.prototype.getBetweenBounds = function (query) {
  return this.tree.betweenBounds(query);
};


/**
 * Get all elements in the index
 * @return {Array of documents}
 */
Index.prototype.getAll = function () {
  var res = [];

  this.tree.executeOnEveryNode(function (node) {
    var i;

    for (i = 0; i < node.data.length; i += 1) {
      res.push(node.data[i]);
    }
  });

  return res;
};


// Interface
module.exports = Index;
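
// Sketch of direct Index usage (illustration only; indexes are normally managed
// by Datastore, and the bounds syntax is the one getCandidates forwards here).
function exampleIndexUsage () {
  var idx = new Index({ fieldName: 'age' });
  idx.insert([{ _id: '1', age: 30 }, { _id: '2', age: 40 }]);
  idx.getMatching(30);                           // -> [{ _id: '1', age: 30 }]
  idx.getBetweenBounds({ $gte: 25, $lt: 35 });   // -> [{ _id: '1', age: 30 }]
}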
835
resources/app/node_modules/nedb/lib/model.js
generated
vendored
Executable file
@@ -0,0 +1,835 @@
/**
 * Handle models (i.e. docs)
 * Serialization/deserialization
 * Copying
 * Querying, update
 */

var util = require('util')
  , _ = require('underscore')
  , modifierFunctions = {}
  , lastStepModifierFunctions = {}
  , comparisonFunctions = {}
  , logicalOperators = {}
  , arrayComparisonFunctions = {}
  ;


/**
 * Check a key, throw an error if the key is not valid
 * @param {String} k key
 * @param {Model} v value, needed to treat the Date edge case
 * Non-treatable edge cases here: if part of the object is of the form { $$date: number } or { $$deleted: true },
 * its serialized-then-deserialized version will be transformed into a Date object
 * But you really need to want it to trigger such behaviour, even when warned not to use '$' at the beginning of the field names...
 */
function checkKey (k, v) {
  if (typeof k === 'number') {
    k = k.toString();
  }

  if (k[0] === '$' && !(k === '$$date' && typeof v === 'number') && !(k === '$$deleted' && v === true) && !(k === '$$indexCreated') && !(k === '$$indexRemoved')) {
    throw new Error('Field names cannot begin with the $ character');
  }

  if (k.indexOf('.') !== -1) {
    throw new Error('Field names cannot contain a .');
  }
}


/**
 * Check a DB object and throw an error if it's not valid
 * Works by applying the above checkKey function to all fields recursively
 */
function checkObject (obj) {
  if (util.isArray(obj)) {
    obj.forEach(function (o) {
      checkObject(o);
    });
  }

  if (typeof obj === 'object' && obj !== null) {
    Object.keys(obj).forEach(function (k) {
      checkKey(k, obj[k]);
      checkObject(obj[k]);
    });
  }
}

/**
 * Serialize an object to be persisted to a one-line string
 * For serialization/deserialization, we use the native JSON parser and not eval or Function
 * That gives us less freedom but data entered in the database may come from users
 * so eval and the like are not safe
 * Accepted primitive types: Number, String, Boolean, Date, null
 * Accepted secondary types: Objects, Arrays
 */
function serialize (obj) {
  var res;

  res = JSON.stringify(obj, function (k, v) {
    checkKey(k, v);

    if (v === undefined) { return undefined; }
    if (v === null) { return null; }

    // Hackish way of checking if object is Date (this way it works between execution contexts in node-webkit).
    // We can't use value directly because for dates it is already a string in this function (date.toJSON was already called), so we use this
    if (typeof this[k].getTime === 'function') { return { $$date: this[k].getTime() }; }

    return v;
  });

  return res;
}


/**
 * From a one-line representation of an object generated by the serialize function
 * Return the object itself
 */
function deserialize (rawData) {
  return JSON.parse(rawData, function (k, v) {
    if (k === '$$date') { return new Date(v); }
    if (typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || v === null) { return v; }
    if (v && v.$$date) { return v.$$date; }

    return v;
  });
}
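
// Round-trip sketch (illustration only): Dates survive persistence via the
// { $$date: timestamp } encoding used by the two functions above.
function exampleSerializationRoundTrip () {
  var line = serialize({ event: 'launch', at: new Date(0) });
  // line === '{"event":"launch","at":{"$$date":0}}'
  var back = deserialize(line);
  // back.at instanceof Date === true, and back.at.getTime() === 0
}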

/**
 * Deep copy a DB object
 * The optional strictKeys flag (defaulting to false) indicates whether to copy everything or only fields
 * where the keys are valid, i.e. don't begin with $ and don't contain a .
 */
function deepCopy (obj, strictKeys) {
  var res;

  if ( typeof obj === 'boolean' ||
       typeof obj === 'number' ||
       typeof obj === 'string' ||
       obj === null ||
       (util.isDate(obj)) ) {
    return obj;
  }

  if (util.isArray(obj)) {
    res = [];
    obj.forEach(function (o) { res.push(deepCopy(o, strictKeys)); });
    return res;
  }

  if (typeof obj === 'object') {
    res = {};
    Object.keys(obj).forEach(function (k) {
      if (!strictKeys || (k[0] !== '$' && k.indexOf('.') === -1)) {
        res[k] = deepCopy(obj[k], strictKeys);
      }
    });
    return res;
  }

  return undefined;   // For now everything else is undefined. We should probably throw an error instead
}


/**
 * Tells if an object is a primitive type or a "real" object
 * Arrays are considered primitive
 */
function isPrimitiveType (obj) {
  return ( typeof obj === 'boolean' ||
           typeof obj === 'number' ||
           typeof obj === 'string' ||
           obj === null ||
           util.isDate(obj) ||
           util.isArray(obj));
}
|
||||
|
||||
|
||||
/**
|
||||
* Utility functions for comparing things
|
||||
* Assumes type checking was already done (a and b already have the same type)
|
||||
* compareNSB works for numbers, strings and booleans
|
||||
*/
|
||||
function compareNSB (a, b) {
|
||||
if (a < b) { return -1; }
|
||||
if (a > b) { return 1; }
|
||||
return 0;
|
||||
}
|
||||
|
||||
function compareArrays (a, b) {
|
||||
var i, comp;
|
||||
|
||||
for (i = 0; i < Math.min(a.length, b.length); i += 1) {
|
||||
comp = compareThings(a[i], b[i]);
|
||||
|
||||
if (comp !== 0) { return comp; }
|
||||
}
|
||||
|
||||
// Common section was identical, longest one wins
|
||||
return compareNSB(a.length, b.length);
|
||||
}


/**
 * Compare { things U undefined }
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * We need to compare with undefined as it will be used in indexes
 * In the case of objects and arrays, we deep-compare
 * If two objects don't have the same type, the (arbitrary) type hierarchy is: undefined, null, numbers, strings, booleans, dates, arrays, objects
 * Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!)
 *
 * @param {Function} _compareStrings String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters)
 */
function compareThings (a, b, _compareStrings) {
  var aKeys, bKeys, comp, i
    , compareStrings = _compareStrings || compareNSB;

  // undefined
  if (a === undefined) { return b === undefined ? 0 : -1; }
  if (b === undefined) { return a === undefined ? 0 : 1; }

  // null
  if (a === null) { return b === null ? 0 : -1; }
  if (b === null) { return a === null ? 0 : 1; }

  // Numbers
  if (typeof a === 'number') { return typeof b === 'number' ? compareNSB(a, b) : -1; }
  if (typeof b === 'number') { return typeof a === 'number' ? compareNSB(a, b) : 1; }

  // Strings
  if (typeof a === 'string') { return typeof b === 'string' ? compareStrings(a, b) : -1; }
  if (typeof b === 'string') { return typeof a === 'string' ? compareStrings(a, b) : 1; }

  // Booleans
  if (typeof a === 'boolean') { return typeof b === 'boolean' ? compareNSB(a, b) : -1; }
  if (typeof b === 'boolean') { return typeof a === 'boolean' ? compareNSB(a, b) : 1; }

  // Dates
  if (util.isDate(a)) { return util.isDate(b) ? compareNSB(a.getTime(), b.getTime()) : -1; }
  if (util.isDate(b)) { return util.isDate(a) ? compareNSB(a.getTime(), b.getTime()) : 1; }

  // Arrays (first element is most significant and so on)
  if (util.isArray(a)) { return util.isArray(b) ? compareArrays(a, b) : -1; }
  if (util.isArray(b)) { return util.isArray(a) ? compareArrays(a, b) : 1; }

  // Objects
  aKeys = Object.keys(a).sort();
  bKeys = Object.keys(b).sort();

  for (i = 0; i < Math.min(aKeys.length, bKeys.length); i += 1) {
    comp = compareThings(a[aKeys[i]], b[bKeys[i]]);

    if (comp !== 0) { return comp; }
  }

  return compareNSB(aKeys.length, bKeys.length);
}
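
// Illustrative sketch of the type hierarchy, e.g.
//   compareThings(null, 42)       // => -1 (null sorts before numbers)
//   compareThings('abc', true)    // => -1 (strings sort before booleans)
//   compareThings({ a: 1 }, [1])  // => 1  (objects sort after arrays)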



// ==============================================================
// Updating documents
// ==============================================================

/**
 * The signature of modifier functions is as follows
 * Their structure is always the same: recursively follow the dot notation while creating
 * the nested documents if needed, then apply the "last step modifier"
 * @param {Object} obj The model to modify
 * @param {String} field Can contain dots, in that case that means we will set a subfield recursively
 * @param {Model} value
 */

/**
 * Set a field to a new value
 */
lastStepModifierFunctions.$set = function (obj, field, value) {
  obj[field] = value;
};


/**
 * Unset a field
 */
lastStepModifierFunctions.$unset = function (obj, field, value) {
  delete obj[field];
};


/**
 * Push an element to the end of an array field
 * Optional modifier $each instead of value to push several values
 * Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/
 * Difference with MongoDB: if $slice is specified and not $each, we act as if value is an empty array
 */
lastStepModifierFunctions.$push = function (obj, field, value) {
  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  if (!util.isArray(obj[field])) { throw new Error("Can't $push an element on non-array values"); }

  if (value !== null && typeof value === 'object' && value.$slice && value.$each === undefined) {
    value.$each = [];
  }

  if (value !== null && typeof value === 'object' && value.$each) {
    if (Object.keys(value).length >= 3 || (Object.keys(value).length === 2 && value.$slice === undefined)) { throw new Error("Can only use $slice in conjunction with $each when $push to array"); }
    if (!util.isArray(value.$each)) { throw new Error("$each requires an array value"); }

    value.$each.forEach(function (v) {
      obj[field].push(v);
    });

    if (value.$slice === undefined || typeof value.$slice !== 'number') { return; }

    if (value.$slice === 0) {
      obj[field] = [];
    } else {
      var start, end, n = obj[field].length;
      if (value.$slice < 0) {
        start = Math.max(0, n + value.$slice);
        end = n;
      } else if (value.$slice > 0) {
        start = 0;
        end = Math.min(n, value.$slice);
      }
      obj[field] = obj[field].slice(start, end);
    }
  } else {
    obj[field].push(value);
  }
};
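
// Illustrative sketch of $push with $each and $slice, e.g.
//   var doc = { scores: [3] };
//   lastStepModifierFunctions.$push(doc, 'scores', { $each: [7, 12, 5], $slice: -2 });
//   // doc.scores is now [12, 5]: 7, 12 and 5 were pushed, then only the last two kept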


/**
 * Add an element to an array field only if it is not already in it
 * No modification if the element is already in the array
 * Note that it doesn't check whether the original array contains duplicates
 */
lastStepModifierFunctions.$addToSet = function (obj, field, value) {
  var addToSet = true;

  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  if (!util.isArray(obj[field])) { throw new Error("Can't $addToSet an element on non-array values"); }

  if (value !== null && typeof value === 'object' && value.$each) {
    if (Object.keys(value).length > 1) { throw new Error("Can't use another field in conjunction with $each"); }
    if (!util.isArray(value.$each)) { throw new Error("$each requires an array value"); }

    value.$each.forEach(function (v) {
      lastStepModifierFunctions.$addToSet(obj, field, v);
    });
  } else {
    obj[field].forEach(function (v) {
      if (compareThings(v, value) === 0) { addToSet = false; }
    });
    if (addToSet) { obj[field].push(value); }
  }
};
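
// Illustrative sketch of $addToSet, e.g.
//   var doc = { tags: ['db'] };
//   lastStepModifierFunctions.$addToSet(doc, 'tags', 'db');     // no-op, 'db' is already there
//   lastStepModifierFunctions.$addToSet(doc, 'tags', 'nedb');   // doc.tags is now ['db', 'nedb']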


/**
 * Remove the first or last element of an array
 */
lastStepModifierFunctions.$pop = function (obj, field, value) {
  if (!util.isArray(obj[field])) { throw new Error("Can't $pop an element from non-array values"); }
  if (typeof value !== 'number') { throw new Error(value + " isn't an integer, can't use it with $pop"); }
  if (value === 0) { return; }

  if (value > 0) {
    obj[field] = obj[field].slice(0, obj[field].length - 1);
  } else {
    obj[field] = obj[field].slice(1);
  }
};


/**
 * Removes all instances of a value from an existing array
 */
lastStepModifierFunctions.$pull = function (obj, field, value) {
  var arr, i;

  if (!util.isArray(obj[field])) { throw new Error("Can't $pull an element from non-array values"); }

  arr = obj[field];
  for (i = arr.length - 1; i >= 0; i -= 1) {
    if (match(arr[i], value)) {
      arr.splice(i, 1);
    }
  }
};


/**
 * Increment a numeric field's value
 */
lastStepModifierFunctions.$inc = function (obj, field, value) {
  if (typeof value !== 'number') { throw new Error(value + " must be a number"); }

  if (typeof obj[field] !== 'number') {
    if (!_.has(obj, field)) {
      obj[field] = value;
    } else {
      throw new Error("Don't use the $inc modifier on non-number fields");
    }
  } else {
    obj[field] += value;
  }
};

/**
 * Updates the value of the field, only if specified field is greater than the current value of the field
 */
lastStepModifierFunctions.$max = function (obj, field, value) {
  if (typeof obj[field] === 'undefined') {
    obj[field] = value;
  } else if (value > obj[field]) {
    obj[field] = value;
  }
};

/**
 * Updates the value of the field, only if specified field is smaller than the current value of the field
 */
lastStepModifierFunctions.$min = function (obj, field, value) {
  if (typeof obj[field] === 'undefined') {
    obj[field] = value;
  } else if (value < obj[field]) {
    obj[field] = value;
  }
};

// Given its name, create the complete modifier function
function createModifierFunction (modifier) {
  return function (obj, field, value) {
    var fieldParts = typeof field === 'string' ? field.split('.') : field;

    if (fieldParts.length === 1) {
      lastStepModifierFunctions[modifier](obj, field, value);
    } else {
      if (obj[fieldParts[0]] === undefined) {
        if (modifier === '$unset') { return; }   // Bad looking specific fix, needs to be generalized once modifiers that behave like $unset are implemented
        obj[fieldParts[0]] = {};
      }
      modifierFunctions[modifier](obj[fieldParts[0]], fieldParts.slice(1), value);
    }
  };
}

// Actually create all modifier functions
Object.keys(lastStepModifierFunctions).forEach(function (modifier) {
  modifierFunctions[modifier] = createModifierFunction(modifier);
});
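
// Illustrative sketch: the generated functions walk dot notation, creating
// intermediate documents as they go, e.g.
//   var doc = {};
//   modifierFunctions.$set(doc, 'stats.games.won', 1);
//   // doc is now { stats: { games: { won: 1 } } }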


/**
 * Modify a DB object according to an update query
 */
function modify (obj, updateQuery) {
  var keys = Object.keys(updateQuery)
    , firstChars = _.map(keys, function (item) { return item[0]; })
    , dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; })
    , newDoc, modifiers
    ;

  if (keys.indexOf('_id') !== -1 && updateQuery._id !== obj._id) { throw new Error("You cannot change a document's _id"); }

  if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
    throw new Error("You cannot mix modifiers and normal fields");
  }

  if (dollarFirstChars.length === 0) {
    // Simply replace the object with the update query contents
    newDoc = deepCopy(updateQuery);
    newDoc._id = obj._id;
  } else {
    // Apply modifiers
    modifiers = _.uniq(keys);
    newDoc = deepCopy(obj);
    modifiers.forEach(function (m) {
      var keys;

      if (!modifierFunctions[m]) { throw new Error("Unknown modifier " + m); }

      // Can't rely on Object.keys throwing on non objects since ES6
      // Not 100% satisfying as non objects can be interpreted as objects but no false negatives so we can live with it
      if (typeof updateQuery[m] !== 'object') {
        throw new Error("Modifier " + m + "'s argument must be an object");
      }

      keys = Object.keys(updateQuery[m]);
      keys.forEach(function (k) {
        modifierFunctions[m](newDoc, k, updateQuery[m][k]);
      });
    });
  }

  // Check result is valid and return it
  checkObject(newDoc);

  if (obj._id !== newDoc._id) { throw new Error("You can't change a document's _id"); }
  return newDoc;
};
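
// Illustrative sketch of modify, e.g.
//   modify({ _id: 'id1', n: 1 }, { $inc: { n: 5 }, $set: { ok: true } })
//   // => { _id: 'id1', n: 6, ok: true }
//   modify({ _id: 'id1', n: 1 }, { n: 42 })   // full replacement, _id preserved
//   // => { n: 42, _id: 'id1' }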


// ==============================================================
// Finding documents
// ==============================================================

/**
 * Get a value from object with dot notation
 * @param {Object} obj
 * @param {String} field
 */
function getDotValue (obj, field) {
  var fieldParts = typeof field === 'string' ? field.split('.') : field
    , i, objs;

  if (!obj) { return undefined; }   // field cannot be empty so that means we should return undefined so that nothing can match

  if (fieldParts.length === 0) { return obj; }

  if (fieldParts.length === 1) { return obj[fieldParts[0]]; }

  if (util.isArray(obj[fieldParts[0]])) {
    // If the next field is an integer, return only this item of the array
    i = parseInt(fieldParts[1], 10);
    if (typeof i === 'number' && !isNaN(i)) {
      return getDotValue(obj[fieldParts[0]][i], fieldParts.slice(2))
    }

    // Return the array of values
    objs = [];
    for (i = 0; i < obj[fieldParts[0]].length; i += 1) {
      objs.push(getDotValue(obj[fieldParts[0]][i], fieldParts.slice(1)));
    }
    return objs;
  } else {
    return getDotValue(obj[fieldParts[0]], fieldParts.slice(1));
  }
}
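
// Illustrative sketch of dot notation over arrays, e.g.
//   var doc = { planets: [{ name: 'Earth' }, { name: 'Mars' }] };
//   getDotValue(doc, 'planets.1.name')   // => 'Mars' (integer part indexes the array)
//   getDotValue(doc, 'planets.name')     // => ['Earth', 'Mars'] (collected from every element)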


/**
 * Check whether 'things' are equal
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * In the case of object, we check deep equality
 * Returns true if they are, false otherwise
 */
function areThingsEqual (a, b) {
  var aKeys, bKeys, i;

  // Strings, booleans, numbers, null
  if (a === null || typeof a === 'string' || typeof a === 'boolean' || typeof a === 'number' ||
      b === null || typeof b === 'string' || typeof b === 'boolean' || typeof b === 'number') { return a === b; }

  // Dates
  if (util.isDate(a) || util.isDate(b)) { return util.isDate(a) && util.isDate(b) && a.getTime() === b.getTime(); }

  // Arrays (no match since arrays are used as a $in)
  // undefined (no match since they mean field doesn't exist and can't be serialized)
  if ((!(util.isArray(a) && util.isArray(b)) && (util.isArray(a) || util.isArray(b))) || a === undefined || b === undefined) { return false; }

  // General objects (check for deep equality)
  // a and b should be objects at this point
  try {
    aKeys = Object.keys(a);
    bKeys = Object.keys(b);
  } catch (e) {
    return false;
  }

  if (aKeys.length !== bKeys.length) { return false; }
  for (i = 0; i < aKeys.length; i += 1) {
    if (bKeys.indexOf(aKeys[i]) === -1) { return false; }
    if (!areThingsEqual(a[aKeys[i]], b[aKeys[i]])) { return false; }
  }
  return true;
}


/**
 * Check that two values are comparable
 */
function areComparable (a, b) {
  if (typeof a !== 'string' && typeof a !== 'number' && !util.isDate(a) &&
      typeof b !== 'string' && typeof b !== 'number' && !util.isDate(b)) {
    return false;
  }

  if (typeof a !== typeof b) { return false; }

  return true;
}


/**
 * Arithmetic and comparison operators
 * @param {Native value} a Value in the object
 * @param {Native value} b Value in the query
 */
comparisonFunctions.$lt = function (a, b) {
  return areComparable(a, b) && a < b;
};

comparisonFunctions.$lte = function (a, b) {
  return areComparable(a, b) && a <= b;
};

comparisonFunctions.$gt = function (a, b) {
  return areComparable(a, b) && a > b;
};

comparisonFunctions.$gte = function (a, b) {
  return areComparable(a, b) && a >= b;
};

comparisonFunctions.$ne = function (a, b) {
  if (a === undefined) { return true; }
  return !areThingsEqual(a, b);
};

comparisonFunctions.$in = function (a, b) {
  var i;

  if (!util.isArray(b)) { throw new Error("$in operator called with a non-array"); }

  for (i = 0; i < b.length; i += 1) {
    if (areThingsEqual(a, b[i])) { return true; }
  }

  return false;
};

comparisonFunctions.$nin = function (a, b) {
  if (!util.isArray(b)) { throw new Error("$nin operator called with a non-array"); }

  return !comparisonFunctions.$in(a, b);
};

comparisonFunctions.$regex = function (a, b) {
  if (!util.isRegExp(b)) { throw new Error("$regex operator called with non regular expression"); }

  if (typeof a !== 'string') {
    return false;
  } else {
    return b.test(a);
  }
};

comparisonFunctions.$exists = function (value, exists) {
  if (exists || exists === '') {   // This will be true for all values of exists except false, null, undefined and 0
    exists = true;   // That's strange behaviour (we should only use true/false) but that's the way Mongo does it...
  } else {
    exists = false;
  }

  if (value === undefined) {
    return !exists;
  } else {
    return exists;
  }
};

// Specific to arrays
comparisonFunctions.$size = function (obj, value) {
  if (!util.isArray(obj)) { return false; }
  if (value % 1 !== 0) { throw new Error("$size operator called without an integer"); }

  return (obj.length == value);
};
comparisonFunctions.$elemMatch = function (obj, value) {
  if (!util.isArray(obj)) { return false; }
  var i = obj.length;
  var result = false;   // Initialize result
  while (i--) {
    if (match(obj[i], value)) {   // If match for array element, return true
      result = true;
      break;
    }
  }
  return result;
};
arrayComparisonFunctions.$size = true;
arrayComparisonFunctions.$elemMatch = true;


/**
 * Match any of the subqueries
 * @param {Model} obj
 * @param {Array of Queries} query
 */
logicalOperators.$or = function (obj, query) {
  var i;

  if (!util.isArray(query)) { throw new Error("$or operator used without an array"); }

  for (i = 0; i < query.length; i += 1) {
    if (match(obj, query[i])) { return true; }
  }

  return false;
};


/**
 * Match all of the subqueries
 * @param {Model} obj
 * @param {Array of Queries} query
 */
logicalOperators.$and = function (obj, query) {
  var i;

  if (!util.isArray(query)) { throw new Error("$and operator used without an array"); }

  for (i = 0; i < query.length; i += 1) {
    if (!match(obj, query[i])) { return false; }
  }

  return true;
};


/**
 * Inverted match of the query
 * @param {Model} obj
 * @param {Query} query
 */
logicalOperators.$not = function (obj, query) {
  return !match(obj, query);
};


/**
 * Use a function to match
 * @param {Model} obj
 * @param {Function} fn
 */
logicalOperators.$where = function (obj, fn) {
  var result;

  if (!_.isFunction(fn)) { throw new Error("$where operator used without a function"); }

  result = fn.call(obj);
  if (!_.isBoolean(result)) { throw new Error("$where function must return boolean"); }

  return result;
};


/**
 * Tell if a given document matches a query
 * @param {Object} obj Document to check
 * @param {Object} query
 */
function match (obj, query) {
  var queryKeys, queryKey, queryValue, i;

  // Primitive query against a primitive type
  // This is a bit of a hack since we construct an object with an arbitrary key only to dereference it later
  // But I don't have time for a cleaner implementation now
  if (isPrimitiveType(obj) || isPrimitiveType(query)) {
    return matchQueryPart({ needAKey: obj }, 'needAKey', query);
  }

  // Normal query
  queryKeys = Object.keys(query);
  for (i = 0; i < queryKeys.length; i += 1) {
    queryKey = queryKeys[i];
    queryValue = query[queryKey];

    if (queryKey[0] === '$') {
      if (!logicalOperators[queryKey]) { throw new Error("Unknown logical operator " + queryKey); }
      if (!logicalOperators[queryKey](obj, queryValue)) { return false; }
    } else {
      if (!matchQueryPart(obj, queryKey, queryValue)) { return false; }
    }
  }

  return true;
};
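
// Illustrative sketch of match, e.g.
//   match({ planet: 'Earth' }, { planet: /^Ea/ })        // => true (regex shorthand)
//   match({ tags: ['nedb', 'db'] }, { tags: 'nedb' })    // => true (one array element matches)
//   match({ age: 23 }, { age: { $gt: 18, $lt: 60 } })    // => true (comparison operators)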


/**
 * Match an object against a specific { key: value } part of a query
 * if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
 */
function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
  var objValue = getDotValue(obj, queryKey)
    , i, keys, firstChars, dollarFirstChars;

  // Check if the value is an array if we don't force a treatment as value
  if (util.isArray(objValue) && !treatObjAsValue) {
    // If the queryValue is an array, try to perform an exact match
    if (util.isArray(queryValue)) {
      return matchQueryPart(obj, queryKey, queryValue, true);
    }

    // Check if we are using an array-specific comparison function
    if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue)) {
      keys = Object.keys(queryValue);
      for (i = 0; i < keys.length; i += 1) {
        if (arrayComparisonFunctions[keys[i]]) { return matchQueryPart(obj, queryKey, queryValue, true); }
      }
    }

    // If not, treat it as an array of { obj, query } where there needs to be at least one match
    for (i = 0; i < objValue.length; i += 1) {
      if (matchQueryPart({ k: objValue[i] }, 'k', queryValue)) { return true; }   // k here could be any string
    }
    return false;
  }

  // queryValue is an actual object. Determine whether it contains comparison operators
  // or only normal fields. Mixed objects are not allowed
  if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue) && !util.isArray(queryValue)) {
    keys = Object.keys(queryValue);
    firstChars = _.map(keys, function (item) { return item[0]; });
    dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; });

    if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
      throw new Error("You cannot mix operators and normal fields");
    }

    // queryValue is an object of this form: { $comparisonOperator1: value1, ... }
    if (dollarFirstChars.length > 0) {
      for (i = 0; i < keys.length; i += 1) {
        if (!comparisonFunctions[keys[i]]) { throw new Error("Unknown comparison function " + keys[i]); }

        if (!comparisonFunctions[keys[i]](objValue, queryValue[keys[i]])) { return false; }
      }
      return true;
    }
  }

  // Using regular expressions with basic querying
  if (util.isRegExp(queryValue)) { return comparisonFunctions.$regex(objValue, queryValue); }

  // queryValue is either a native value or a normal object
  // Basic matching is possible
  if (!areThingsEqual(objValue, queryValue)) { return false; }

  return true;
}


// Interface
module.exports.serialize = serialize;
module.exports.deserialize = deserialize;
module.exports.deepCopy = deepCopy;
module.exports.checkObject = checkObject;
module.exports.isPrimitiveType = isPrimitiveType;
module.exports.modify = modify;
module.exports.getDotValue = getDotValue;
module.exports.match = match;
module.exports.areThingsEqual = areThingsEqual;
module.exports.compareThings = compareThings;
314
resources/app/node_modules/nedb/lib/persistence.js
generated
vendored
Executable file
@@ -0,0 +1,314 @@
/**
 * Handle every persistence-related task
 * The interface Datastore expects to be implemented is
 * * Persistence.loadDatabase(callback) and callback has signature err
 * * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
 */

var storage = require('./storage')
  , path = require('path')
  , model = require('./model')
  , async = require('async')
  , customUtils = require('./customUtils')
  , Index = require('./indexes')
  ;


/**
 * Create a new Persistence object for database options.db
 * @param {Datastore} options.db
 * @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
 *                                            Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
 */
function Persistence (options) {
  var i, j, randomString;

  this.db = options.db;
  this.inMemoryOnly = this.db.inMemoryOnly;
  this.filename = this.db.filename;
  this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1;

  if (!this.inMemoryOnly && this.filename && this.filename.charAt(this.filename.length - 1) === '~') {
    throw new Error("The datafile name can't end with a ~, which is reserved for crash safe backup files");
  }

  // After serialization and before deserialization hooks with some basic sanity checks
  if (options.afterSerialization && !options.beforeDeserialization) {
    throw new Error("Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss");
  }
  if (!options.afterSerialization && options.beforeDeserialization) {
    throw new Error("Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss");
  }
  this.afterSerialization = options.afterSerialization || function (s) { return s; };
  this.beforeDeserialization = options.beforeDeserialization || function (s) { return s; };
  for (i = 1; i < 30; i += 1) {
    for (j = 0; j < 10; j += 1) {
      randomString = customUtils.uid(i);
      if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
        throw new Error("beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss");
      }
    }
  }

  // For NW apps, store data in the same directory where NW stores application data
  if (this.filename && options.nodeWebkitAppName) {
    console.log("==================================================================");
    console.log("WARNING: The nodeWebkitAppName option is deprecated");
    console.log("To get the path to the directory where Node Webkit stores the data");
    console.log("for your app, use the internal nw.gui module like this");
    console.log("require('nw.gui').App.dataPath");
    console.log("See https://github.com/rogerwang/node-webkit/issues/500");
    console.log("==================================================================");
    this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename);
  }
};


/**
 * Check if a directory exists and create it on the fly if it is not the case
 * cb is optional, signature: err
 */
Persistence.ensureDirectoryExists = function (dir, cb) {
  var callback = cb || function () {}
    ;

  storage.mkdirp(dir, function (err) { return callback(err); });
};



/**
 * Return the path to the datafile if the given filename is relative to the directory where Node Webkit stores
 * data for this application. Probably the best place to store data
 */
Persistence.getNWAppFilename = function (appName, relativeFilename) {
  var home;

  switch (process.platform) {
    case 'win32':
    case 'win64':
      home = process.env.LOCALAPPDATA || process.env.APPDATA;
      if (!home) { throw new Error("Couldn't find the base application data folder"); }
      home = path.join(home, appName);
      break;
    case 'darwin':
      home = process.env.HOME;
      if (!home) { throw new Error("Couldn't find the base application data directory"); }
      home = path.join(home, 'Library', 'Application Support', appName);
      break;
    case 'linux':
      home = process.env.HOME;
      if (!home) { throw new Error("Couldn't find the base application data directory"); }
      home = path.join(home, '.config', appName);
      break;
    default:
      throw new Error("Can't use the Node Webkit relative path for platform " + process.platform);
      break;
  }

  return path.join(home, 'nedb-data', relativeFilename);
}


/**
 * Persist cached database
 * This serves as a compaction function since the cache always contains only the number of documents in the collection
 * while the data file is append-only so it may grow larger
 * @param {Function} cb Optional callback, signature: err
 */
Persistence.prototype.persistCachedDatabase = function (cb) {
  var callback = cb || function () {}
    , toPersist = ''
    , self = this
    ;

  if (this.inMemoryOnly) { return callback(null); }

  this.db.getAllData().forEach(function (doc) {
    toPersist += self.afterSerialization(model.serialize(doc)) + '\n';
  });
  Object.keys(this.db.indexes).forEach(function (fieldName) {
    if (fieldName != "_id") {   // The special _id index is managed by datastore.js, the others need to be persisted
      toPersist += self.afterSerialization(model.serialize({ $$indexCreated: { fieldName: fieldName, unique: self.db.indexes[fieldName].unique, sparse: self.db.indexes[fieldName].sparse }})) + '\n';
    }
  });

  storage.crashSafeWriteFile(this.filename, toPersist, function (err) {
    if (err) { return callback(err); }
    self.db.emit('compaction.done');
    return callback(null);
  });
};
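
// Illustrative usage sketch (assuming db is a Datastore instance): compaction
// can be observed through the 'compaction.done' event emitted above, e.g.
//   db.on('compaction.done', function () { /* datafile was rewritten */ });
//   db.persistence.compactDatafile();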


/**
 * Queue a rewrite of the datafile
 */
Persistence.prototype.compactDatafile = function () {
  this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] });
};


/**
 * Set automatic compaction every interval ms
 * @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds
 */
Persistence.prototype.setAutocompactionInterval = function (interval) {
  var self = this
    , minInterval = 5000
    , realInterval = Math.max(interval || 0, minInterval)
    ;

  this.stopAutocompaction();

  this.autocompactionIntervalId = setInterval(function () {
    self.compactDatafile();
  }, realInterval);
};
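
// Illustrative usage sketch (assuming db is a Datastore instance), e.g.
//   db.persistence.setAutocompactionInterval(10000);   // compact roughly every 10s
//   db.persistence.stopAutocompaction();               // later, turn it off again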


/**
 * Stop autocompaction (do nothing if autocompaction was not running)
 */
Persistence.prototype.stopAutocompaction = function () {
  if (this.autocompactionIntervalId) { clearInterval(this.autocompactionIntervalId); }
};


/**
 * Persist new state for the given newDocs (can be insertion, update or removal)
 * Use an append-only format
 * @param {Array} newDocs Can be empty if no doc was updated/removed
 * @param {Function} cb Optional, signature: err
 */
Persistence.prototype.persistNewState = function (newDocs, cb) {
  var self = this
    , toPersist = ''
    , callback = cb || function () {}
    ;

  // In-memory only datastore
  if (self.inMemoryOnly) { return callback(null); }

  newDocs.forEach(function (doc) {
    toPersist += self.afterSerialization(model.serialize(doc)) + '\n';
  });

  if (toPersist.length === 0) { return callback(null); }

  storage.appendFile(self.filename, toPersist, 'utf8', function (err) {
    return callback(err);
  });
};


/**
 * From a database's raw data, return the corresponding
 * machine understandable collection
 */
Persistence.prototype.treatRawData = function (rawData) {
  var data = rawData.split('\n')
    , dataById = {}
    , tdata = []
    , i
    , indexes = {}
    , corruptItems = -1   // Last line of every data file is usually blank so not really corrupt
    ;

  for (i = 0; i < data.length; i += 1) {
    var doc;

    try {
      doc = model.deserialize(this.beforeDeserialization(data[i]));
      if (doc._id) {
        if (doc.$$deleted === true) {
          delete dataById[doc._id];
        } else {
          dataById[doc._id] = doc;
        }
      } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != undefined) {
        indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated;
      } else if (typeof doc.$$indexRemoved === "string") {
        delete indexes[doc.$$indexRemoved];
      }
    } catch (e) {
      corruptItems += 1;
    }
  }

  // A bit lenient on corruption
  if (data.length > 0 && corruptItems / data.length > this.corruptAlertThreshold) {
    throw new Error("More than " + Math.floor(100 * this.corruptAlertThreshold) + "% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss");
  }

  Object.keys(dataById).forEach(function (k) {
    tdata.push(dataById[k]);
  });

  return { data: tdata, indexes: indexes };
};
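
// Illustrative sketch: the append-only format collapses to live documents,
// e.g. for this raw data (one serialized doc per line, assuming identity hooks)
//   {"_id":"1","a":1}
//   {"_id":"1","a":2}
//   {"_id":"2","$$deleted":true}
// treatRawData returns { data: [{ _id: '1', a: 2 }], indexes: {} }: the last
// write for an _id wins and $$deleted lines drop the corresponding document.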


/**
 * Load the database
 * 1) Create all indexes
 * 2) Insert all data
 * 3) Compact the database
 * This means pulling data out of the data file or creating it if it doesn't exist
 * Also, all data is persisted right away, which has the effect of compacting the database file
 * This operation is very quick at startup for a big collection (60ms for ~10k docs)
 * @param {Function} cb Optional callback, signature: err
 */
Persistence.prototype.loadDatabase = function (cb) {
  var callback = cb || function () {}
    , self = this
    ;

  self.db.resetIndexes();

  // In-memory only datastore
  if (self.inMemoryOnly) { return callback(null); }

  async.waterfall([
    function (cb) {
      Persistence.ensureDirectoryExists(path.dirname(self.filename), function (err) {
        storage.ensureDatafileIntegrity(self.filename, function (err) {
          storage.readFile(self.filename, 'utf8', function (err, rawData) {
            if (err) { return cb(err); }

            try {
              var treatedData = self.treatRawData(rawData);
            } catch (e) {
              return cb(e);
            }

            // Recreate all indexes in the datafile
            Object.keys(treatedData.indexes).forEach(function (key) {
              self.db.indexes[key] = new Index(treatedData.indexes[key]);
            });

            // Fill cached database (i.e. all indexes) with data
            try {
              self.db.resetIndexes(treatedData.data);
            } catch (e) {
              self.db.resetIndexes();   // Rollback any index which didn't fail
              return cb(e);
            }

            self.db.persistence.persistCachedDatabase(cb);
          });
        });
      });
    }
  ], function (err) {
    if (err) { return callback(err); }

    self.db.executor.processBuffer();
    return callback(null);
  });
};


// Interface
module.exports = Persistence;
136
resources/app/node_modules/nedb/lib/storage.js
generated
vendored
Executable file
@@ -0,0 +1,136 @@
/**
 * Way data is stored for this database
 * For a Node.js/Node Webkit database it's the file system
 * For a browser-side database it's localforage which chooses the best option depending on user browser (IndexedDB then WebSQL then localStorage)
 *
 * This version is the Node.js/Node Webkit version
 * It's essentially fs, mkdirp and crash safe write and read functions
 */

var fs = require('fs')
  , mkdirp = require('mkdirp')
  , async = require('async')
  , path = require('path')
  , storage = {}
  ;

storage.exists = fs.exists;
storage.rename = fs.rename;
storage.writeFile = fs.writeFile;
storage.unlink = fs.unlink;
storage.appendFile = fs.appendFile;
storage.readFile = fs.readFile;
storage.mkdirp = mkdirp;


/**
 * Explicit name ...
 */
storage.ensureFileDoesntExist = function (file, callback) {
  storage.exists(file, function (exists) {
    if (!exists) { return callback(null); }

    storage.unlink(file, function (err) { return callback(err); });
  });
};


/**
 * Flush data in OS buffer to storage if corresponding option is set
 * @param {String} options.filename
 * @param {Boolean} options.isDir Optional, defaults to false
 * If options is a string, it is assumed that the flush of the file (not dir) called options was requested
 */
storage.flushToStorage = function (options, callback) {
  var filename, flags;
  if (typeof options === 'string') {
    filename = options;
    flags = 'r+';
  } else {
    filename = options.filename;
    flags = options.isDir ? 'r' : 'r+';
  }

  // Windows can't fsync (FlushFileBuffers) directories. We can live with this as it cannot cause 100% dataloss
  // except in the very rare event of the first time database is loaded and a crash happens
  if (flags === 'r' && (process.platform === 'win32' || process.platform === 'win64')) { return callback(null); }

  fs.open(filename, flags, function (err, fd) {
    if (err) { return callback(err); }
    fs.fsync(fd, function (errFS) {
      fs.close(fd, function (errC) {
        if (errFS || errC) {
          var e = new Error('Failed to flush to storage');
          e.errorOnFsync = errFS;
          e.errorOnClose = errC;
          return callback(e);
        } else {
          return callback(null);
        }
      });
    });
  });
};


/**
 * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
 * @param {String} filename
 * @param {String} data
 * @param {Function} cb Optional callback, signature: err
 */
storage.crashSafeWriteFile = function (filename, data, cb) {
  var callback = cb || function () {}
    , tempFilename = filename + '~';

  async.waterfall([
    async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
  , function (cb) {
      storage.exists(filename, function (exists) {
        if (exists) {
          storage.flushToStorage(filename, function (err) { return cb(err); });
        } else {
          return cb();
        }
      });
    }
  , function (cb) {
      storage.writeFile(tempFilename, data, function (err) { return cb(err); });
    }
  , async.apply(storage.flushToStorage, tempFilename)
  , function (cb) {
      storage.rename(tempFilename, filename, function (err) { return cb(err); });
    }
  , async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
  ], function (err) { return callback(err); });
};
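
// Illustrative usage sketch (hypothetical path), e.g.
//   storage.crashSafeWriteFile('workspace/it.db', serializedData, function (err) {
//     // On success it.db holds exactly the new contents; a crash mid-write leaves
//     // either the previous it.db or a recoverable it.db~ temporary file instead.
//   });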


/**
 * Ensure the datafile contains all the data, even if there was a crash during a full file write
 * @param {String} filename
 * @param {Function} callback signature: err
 */
storage.ensureDatafileIntegrity = function (filename, callback) {
  var tempFilename = filename + '~';

  storage.exists(filename, function (filenameExists) {
    // Write was successful
    if (filenameExists) { return callback(null); }

    storage.exists(tempFilename, function (oldFilenameExists) {
      // New database
      if (!oldFilenameExists) {
        return storage.writeFile(filename, '', 'utf8', function (err) { callback(err); });
      }

      // Write failed, use old version
      storage.rename(tempFilename, filename, function (err) { return callback(err); });
    });
  });
};


// Interface
module.exports = storage;
19
resources/app/node_modules/nedb/node_modules/async/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright (c) 2010 Caolan McMahon

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
11
resources/app/node_modules/nedb/node_modules/async/component.json
generated
vendored
Normal file
@@ -0,0 +1,11 @@
{
  "name": "async",
  "repo": "caolan/async",
  "description": "Higher-order functions and common patterns for asynchronous code",
  "version": "0.1.23",
  "keywords": [],
  "dependencies": {},
  "development": {},
  "main": "lib/async.js",
  "scripts": [ "lib/async.js" ]
}
958
resources/app/node_modules/nedb/node_modules/async/lib/async.js
generated
vendored
Executable file
@@ -0,0 +1,958 @@
/*global setImmediate: false, setTimeout: false, console: false */
(function () {

    var async = {};

    // global on the server, window in the browser
    var root, previous_async;

    root = this;
    if (root != null) {
      previous_async = root.async;
    }

    async.noConflict = function () {
        root.async = previous_async;
        return async;
    };

    function only_once(fn) {
        var called = false;
        return function() {
            if (called) throw new Error("Callback was already called.");
            called = true;
            fn.apply(root, arguments);
        }
    }

    //// cross-browser compatibility functions ////

    var _each = function (arr, iterator) {
        if (arr.forEach) {
            return arr.forEach(iterator);
        }
        for (var i = 0; i < arr.length; i += 1) {
            iterator(arr[i], i, arr);
        }
    };

    var _map = function (arr, iterator) {
        if (arr.map) {
            return arr.map(iterator);
        }
        var results = [];
        _each(arr, function (x, i, a) {
            results.push(iterator(x, i, a));
        });
        return results;
    };

    var _reduce = function (arr, iterator, memo) {
        if (arr.reduce) {
            return arr.reduce(iterator, memo);
        }
        _each(arr, function (x, i, a) {
            memo = iterator(memo, x, i, a);
        });
        return memo;
    };

    var _keys = function (obj) {
        if (Object.keys) {
            return Object.keys(obj);
        }
        var keys = [];
        for (var k in obj) {
            if (obj.hasOwnProperty(k)) {
                keys.push(k);
            }
        }
        return keys;
    };

    //// exported async module functions ////

    //// nextTick implementation with browser-compatible fallback ////
    if (typeof process === 'undefined' || !(process.nextTick)) {
        if (typeof setImmediate === 'function') {
            async.nextTick = function (fn) {
                // not a direct alias for IE10 compatibility
                setImmediate(fn);
            };
            async.setImmediate = async.nextTick;
        }
        else {
            async.nextTick = function (fn) {
                setTimeout(fn, 0);
            };
            async.setImmediate = async.nextTick;
        }
    }
    else {
        async.nextTick = process.nextTick;
        if (typeof setImmediate !== 'undefined') {
            async.setImmediate = function (fn) {
                // not a direct alias for IE10 compatibility
                setImmediate(fn);
            };
        }
        else {
            async.setImmediate = async.nextTick;
        }
    }

    async.each = function (arr, iterator, callback) {
        callback = callback || function () {};
        if (!arr.length) {
            return callback();
        }
        var completed = 0;
        _each(arr, function (x) {
            iterator(x, only_once(function (err) {
                if (err) {
                    callback(err);
                    callback = function () {};
                }
                else {
                    completed += 1;
                    if (completed >= arr.length) {
                        callback(null);
                    }
                }
            }));
        });
    };
    async.forEach = async.each;
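
    // Illustrative usage sketch (assuming Node's fs module is in scope): run an
    // async iterator over every element in parallel, with one final callback, e.g.
    //   async.each(['a.db', 'b.db'], fs.unlink, function (err) {
    //     // err is the first error reported by any fs.unlink, null if all succeeded
    //   });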

    async.eachSeries = function (arr, iterator, callback) {
        callback = callback || function () {};
        if (!arr.length) {
            return callback();
        }
        var completed = 0;
        var iterate = function () {
            iterator(arr[completed], function (err) {
                if (err) {
                    callback(err);
                    callback = function () {};
                }
                else {
                    completed += 1;
                    if (completed >= arr.length) {
                        callback(null);
                    }
                    else {
                        iterate();
                    }
                }
            });
        };
        iterate();
    };
    async.forEachSeries = async.eachSeries;

    async.eachLimit = function (arr, limit, iterator, callback) {
        var fn = _eachLimit(limit);
        fn.apply(null, [arr, iterator, callback]);
    };
    async.forEachLimit = async.eachLimit;

    var _eachLimit = function (limit) {

        return function (arr, iterator, callback) {
            callback = callback || function () {};
            if (!arr.length || limit <= 0) {
                return callback();
            }
            var completed = 0;
            var started = 0;
            var running = 0;

            (function replenish () {
                if (completed >= arr.length) {
                    return callback();
                }

                while (running < limit && started < arr.length) {
                    started += 1;
                    running += 1;
                    iterator(arr[started - 1], function (err) {
                        if (err) {
                            callback(err);
                            callback = function () {};
                        }
                        else {
                            completed += 1;
                            running -= 1;
                            if (completed >= arr.length) {
                                callback();
                            }
                            else {
                                replenish();
                            }
                        }
                    });
                }
            })();
        };
    };


    var doParallel = function (fn) {
        return function () {
            var args = Array.prototype.slice.call(arguments);
            return fn.apply(null, [async.each].concat(args));
        };
    };
    var doParallelLimit = function(limit, fn) {
        return function () {
            var args = Array.prototype.slice.call(arguments);
            return fn.apply(null, [_eachLimit(limit)].concat(args));
        };
    };
    var doSeries = function (fn) {
        return function () {
            var args = Array.prototype.slice.call(arguments);
            return fn.apply(null, [async.eachSeries].concat(args));
        };
    };


    var _asyncMap = function (eachfn, arr, iterator, callback) {
        var results = [];
        arr = _map(arr, function (x, i) {
            return {index: i, value: x};
        });
        eachfn(arr, function (x, callback) {
            iterator(x.value, function (err, v) {
                results[x.index] = v;
                callback(err);
            });
        }, function (err) {
            callback(err, results);
        });
    };
    async.map = doParallel(_asyncMap);
    async.mapSeries = doSeries(_asyncMap);
    async.mapLimit = function (arr, limit, iterator, callback) {
        return _mapLimit(limit)(arr, iterator, callback);
    };

    var _mapLimit = function(limit) {
        return doParallelLimit(limit, _asyncMap);
    };

    // reduce only has a series version, as doing reduce in parallel won't
    // work in many situations.
    async.reduce = function (arr, memo, iterator, callback) {
        async.eachSeries(arr, function (x, callback) {
            iterator(memo, x, function (err, v) {
                memo = v;
                callback(err);
            });
        }, function (err) {
            callback(err, memo);
        });
    };
    // inject alias
    async.inject = async.reduce;
    // foldl alias
    async.foldl = async.reduce;

    async.reduceRight = function (arr, memo, iterator, callback) {
        var reversed = _map(arr, function (x) {
            return x;
        }).reverse();
        async.reduce(reversed, memo, iterator, callback);
    };
    // foldr alias
    async.foldr = async.reduceRight;

    var _filter = function (eachfn, arr, iterator, callback) {
        var results = [];
        arr = _map(arr, function (x, i) {
            return {index: i, value: x};
        });
        eachfn(arr, function (x, callback) {
            iterator(x.value, function (v) {
                if (v) {
                    results.push(x);
                }
                callback();
            });
        }, function (err) {
            callback(_map(results.sort(function (a, b) {
                return a.index - b.index;
            }), function (x) {
                return x.value;
            }));
        });
    };
    async.filter = doParallel(_filter);
    async.filterSeries = doSeries(_filter);
    // select alias
    async.select = async.filter;
    async.selectSeries = async.filterSeries;

    var _reject = function (eachfn, arr, iterator, callback) {
        var results = [];
        arr = _map(arr, function (x, i) {
            return {index: i, value: x};
        });
        eachfn(arr, function (x, callback) {
            iterator(x.value, function (v) {
                if (!v) {
                    results.push(x);
                }
                callback();
            });
        }, function (err) {
            callback(_map(results.sort(function (a, b) {
                return a.index - b.index;
            }), function (x) {
                return x.value;
            }));
        });
    };
    async.reject = doParallel(_reject);
    async.rejectSeries = doSeries(_reject);

    var _detect = function (eachfn, arr, iterator, main_callback) {
        eachfn(arr, function (x, callback) {
            iterator(x, function (result) {
                if (result) {
                    main_callback(x);
                    main_callback = function () {};
                }
                else {
                    callback();
                }
            });
        }, function (err) {
            main_callback();
        });
    };
    async.detect = doParallel(_detect);
    async.detectSeries = doSeries(_detect);

    async.some = function (arr, iterator, main_callback) {
        async.each(arr, function (x, callback) {
            iterator(x, function (v) {
                if (v) {
                    main_callback(true);
                    main_callback = function () {};
                }
                callback();
            });
        }, function (err) {
            main_callback(false);
        });
    };
    // any alias
    async.any = async.some;

    async.every = function (arr, iterator, main_callback) {
        async.each(arr, function (x, callback) {
            iterator(x, function (v) {
                if (!v) {
                    main_callback(false);
                    main_callback = function () {};
                }
                callback();
            });
        }, function (err) {
            main_callback(true);
        });
    };
    // all alias
    async.all = async.every;

    async.sortBy = function (arr, iterator, callback) {
        async.map(arr, function (x, callback) {
            iterator(x, function (err, criteria) {
                if (err) {
                    callback(err);
                }
                else {
                    callback(null, {value: x, criteria: criteria});
                }
            });
        }, function (err, results) {
            if (err) {
                return callback(err);
            }
            else {
                var fn = function (left, right) {
                    var a = left.criteria, b = right.criteria;
                    return a < b ? -1 : a > b ? 1 : 0;
                };
                callback(null, _map(results.sort(fn), function (x) {
                    return x.value;
                }));
            }
        });
    };

    async.auto = function (tasks, callback) {
        callback = callback || function () {};
        var keys = _keys(tasks);
        if (!keys.length) {
            return callback(null);
        }

        var results = {};

        var listeners = [];
        var addListener = function (fn) {
            listeners.unshift(fn);
        };
        var removeListener = function (fn) {
            for (var i = 0; i < listeners.length; i += 1) {
                if (listeners[i] === fn) {
                    listeners.splice(i, 1);
                    return;
                }
            }
        };
        var taskComplete = function () {
            _each(listeners.slice(0), function (fn) {
                fn();
            });
        };

        addListener(function () {
            if (_keys(results).length === keys.length) {
                callback(null, results);
                callback = function () {};
            }
        });

        _each(keys, function (k) {
            var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k];
            var taskCallback = function (err) {
                var args = Array.prototype.slice.call(arguments, 1);
                if (args.length <= 1) {
                    args = args[0];
                }
                if (err) {
                    var safeResults = {};
                    _each(_keys(results), function(rkey) {
                        safeResults[rkey] = results[rkey];
                    });
                    safeResults[k] = args;
                    callback(err, safeResults);
                    // stop subsequent errors hitting callback multiple times
                    callback = function () {};
                }
                else {
                    results[k] = args;
                    async.setImmediate(taskComplete);
                }
            };
            var requires = task.slice(0, Math.abs(task.length - 1)) || [];
            var ready = function () {
                return _reduce(requires, function (a, x) {
                    return (a && results.hasOwnProperty(x));
                }, true) && !results.hasOwnProperty(k);
            };
            if (ready()) {
                task[task.length - 1](taskCallback, results);
            }
            else {
                var listener = function () {
                    if (ready()) {
                        removeListener(listener);
                        task[task.length - 1](taskCallback, results);
                    }
                };
                addListener(listener);
            }
        });
    };
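
    // Illustrative usage sketch: tasks run as soon as their dependencies finish.
    // Note this older version passes (callback, results) to dependent tasks, e.g.
    //   async.auto({
    //       get_data: function (callback) { callback(null, 'data'); },
    //       make_folder: function (callback) { callback(null, 'folder'); },
    //       write_file: ['get_data', 'make_folder', function (callback, results) {
    //           callback(null, results.get_data + ' in ' + results.make_folder);
    //       }]
    //   }, function (err, results) {
    //       // results.write_file === 'data in folder'
    //   });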

    async.waterfall = function (tasks, callback) {
        callback = callback || function () {};
        if (tasks.constructor !== Array) {
            var err = new Error('First argument to waterfall must be an array of functions');
            return callback(err);
        }
        if (!tasks.length) {
            return callback();
        }
        var wrapIterator = function (iterator) {
            return function (err) {
                if (err) {
                    callback.apply(null, arguments);
                    callback = function () {};
                }
                else {
                    var args = Array.prototype.slice.call(arguments, 1);
                    var next = iterator.next();
                    if (next) {
                        args.push(wrapIterator(next));
                    }
                    else {
                        args.push(callback);
                    }
                    async.setImmediate(function () {
                        iterator.apply(null, args);
                    });
                }
            };
        };
        wrapIterator(async.iterator(tasks))();
    };
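
    // Editor's note: an illustrative usage sketch, not part of the original
    // source. Each function's results are passed as arguments to the next:
    //
    //     async.waterfall([
    //         function (callback) { callback(null, 1, 2); },
    //         function (a, b, callback) { callback(null, a + b); },
    //         function (sum, callback) { callback(null, sum * 2); }
    //     ], function (err, result) {
    //         console.log(result); // 6
    //     });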

    var _parallel = function(eachfn, tasks, callback) {
        callback = callback || function () {};
        if (tasks.constructor === Array) {
            eachfn.map(tasks, function (fn, callback) {
                if (fn) {
                    fn(function (err) {
                        var args = Array.prototype.slice.call(arguments, 1);
                        if (args.length <= 1) {
                            args = args[0];
                        }
                        callback.call(null, err, args);
                    });
                }
            }, callback);
        }
        else {
            var results = {};
            eachfn.each(_keys(tasks), function (k, callback) {
                tasks[k](function (err) {
                    var args = Array.prototype.slice.call(arguments, 1);
                    if (args.length <= 1) {
                        args = args[0];
                    }
                    results[k] = args;
                    callback(err);
                });
            }, function (err) {
                callback(err, results);
            });
        }
    };

    async.parallel = function (tasks, callback) {
        _parallel({ map: async.map, each: async.each }, tasks, callback);
    };

    async.parallelLimit = function(tasks, limit, callback) {
        _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
    };

    async.series = function (tasks, callback) {
        callback = callback || function () {};
        if (tasks.constructor === Array) {
            async.mapSeries(tasks, function (fn, callback) {
                if (fn) {
                    fn(function (err) {
                        var args = Array.prototype.slice.call(arguments, 1);
                        if (args.length <= 1) {
                            args = args[0];
                        }
                        callback.call(null, err, args);
                    });
                }
            }, callback);
        }
        else {
            var results = {};
            async.eachSeries(_keys(tasks), function (k, callback) {
                tasks[k](function (err) {
                    var args = Array.prototype.slice.call(arguments, 1);
                    if (args.length <= 1) {
                        args = args[0];
                    }
                    results[k] = args;
                    callback(err);
                });
            }, function (err) {
                callback(err, results);
            });
        }
    };
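
    // Editor's note: an illustrative usage sketch, not part of the original
    // source. parallel starts all tasks at once; series runs them in order.
    // Both accept an array (results as an array) or an object (results keyed
    // by task name, via the _keys branch above):
    //
    //     async.series({
    //         one: function (callback) { callback(null, 1); },
    //         two: function (callback) { callback(null, 2); }
    //     }, function (err, results) {
    //         console.log(results); // { one: 1, two: 2 }
    //     });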

    async.iterator = function (tasks) {
        var makeCallback = function (index) {
            var fn = function () {
                if (tasks.length) {
                    tasks[index].apply(null, arguments);
                }
                return fn.next();
            };
            fn.next = function () {
                return (index < tasks.length - 1) ? makeCallback(index + 1): null;
            };
            return fn;
        };
        return makeCallback(0);
    };

    async.apply = function (fn) {
        var args = Array.prototype.slice.call(arguments, 1);
        return function () {
            return fn.apply(
                null, args.concat(Array.prototype.slice.call(arguments))
            );
        };
    };

    var _concat = function (eachfn, arr, fn, callback) {
        var r = [];
        eachfn(arr, function (x, cb) {
            fn(x, function (err, y) {
                r = r.concat(y || []);
                cb(err);
            });
        }, function (err) {
            callback(err, r);
        });
    };
    async.concat = doParallel(_concat);
    async.concatSeries = doSeries(_concat);

    async.whilst = function (test, iterator, callback) {
        if (test()) {
            iterator(function (err) {
                if (err) {
                    return callback(err);
                }
                async.whilst(test, iterator, callback);
            });
        }
        else {
            callback();
        }
    };

    async.doWhilst = function (iterator, test, callback) {
        iterator(function (err) {
            if (err) {
                return callback(err);
            }
            if (test()) {
                async.doWhilst(iterator, test, callback);
            }
            else {
                callback();
            }
        });
    };

    async.until = function (test, iterator, callback) {
        if (!test()) {
            iterator(function (err) {
                if (err) {
                    return callback(err);
                }
                async.until(test, iterator, callback);
            });
        }
        else {
            callback();
        }
    };

    async.doUntil = function (iterator, test, callback) {
        iterator(function (err) {
            if (err) {
                return callback(err);
            }
            if (!test()) {
                async.doUntil(iterator, test, callback);
            }
            else {
                callback();
            }
        });
    };

    async.queue = function (worker, concurrency) {
        if (concurrency === undefined) {
            concurrency = 1;
        }
        function _insert(q, data, pos, callback) {
            if(data.constructor !== Array) {
                data = [data];
            }
            _each(data, function(task) {
                var item = {
                    data: task,
                    callback: typeof callback === 'function' ? callback : null
                };

                if (pos) {
                    q.tasks.unshift(item);
                } else {
                    q.tasks.push(item);
                }

                if (q.saturated && q.tasks.length === concurrency) {
                    q.saturated();
                }
                async.setImmediate(q.process);
            });
        }

        var workers = 0;
        var q = {
            tasks: [],
            concurrency: concurrency,
            saturated: null,
            empty: null,
            drain: null,
            push: function (data, callback) {
                _insert(q, data, false, callback);
            },
            unshift: function (data, callback) {
                _insert(q, data, true, callback);
            },
            process: function () {
                if (workers < q.concurrency && q.tasks.length) {
                    var task = q.tasks.shift();
                    if (q.empty && q.tasks.length === 0) {
                        q.empty();
                    }
                    workers += 1;
                    var next = function () {
                        workers -= 1;
                        if (task.callback) {
                            task.callback.apply(task, arguments);
                        }
                        if (q.drain && q.tasks.length + workers === 0) {
                            q.drain();
                        }
                        q.process();
                    };
                    var cb = only_once(next);
                    worker(task.data, cb);
                }
            },
            length: function () {
                return q.tasks.length;
            },
            running: function () {
                return workers;
            }
        };
        return q;
    };
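
    // Editor's note: an illustrative usage sketch, not part of the original
    // source. A queue runs at most `concurrency` workers at a time; the
    // drain property fires once the last task finishes:
    //
    //     var q = async.queue(function (task, done) {
    //         console.log('processing ' + task.name);
    //         done();
    //     }, 2);
    //     q.drain = function () { console.log('all done'); };
    //     q.push({ name: 'a' });
    //     q.push({ name: 'b' }, function (err) { console.log('b finished'); });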

    async.cargo = function (worker, payload) {
        var working = false,
            tasks = [];

        var cargo = {
            tasks: tasks,
            payload: payload,
            saturated: null,
            empty: null,
            drain: null,
            push: function (data, callback) {
                if(data.constructor !== Array) {
                    data = [data];
                }
                _each(data, function(task) {
                    tasks.push({
                        data: task,
                        callback: typeof callback === 'function' ? callback : null
                    });
                    if (cargo.saturated && tasks.length === payload) {
                        cargo.saturated();
                    }
                });
                async.setImmediate(cargo.process);
            },
            process: function process() {
                if (working) return;
                if (tasks.length === 0) {
                    if(cargo.drain) cargo.drain();
                    return;
                }

                var ts = typeof payload === 'number'
                            ? tasks.splice(0, payload)
                            : tasks.splice(0);

                var ds = _map(ts, function (task) {
                    return task.data;
                });

                if(cargo.empty) cargo.empty();
                working = true;
                worker(ds, function () {
                    working = false;

                    var args = arguments;
                    _each(ts, function (data) {
                        if (data.callback) {
                            data.callback.apply(null, args);
                        }
                    });

                    process();
                });
            },
            length: function () {
                return tasks.length;
            },
            running: function () {
                return working;
            }
        };
        return cargo;
    };

    var _console_fn = function (name) {
        return function (fn) {
            var args = Array.prototype.slice.call(arguments, 1);
            fn.apply(null, args.concat([function (err) {
                var args = Array.prototype.slice.call(arguments, 1);
                if (typeof console !== 'undefined') {
                    if (err) {
                        if (console.error) {
                            console.error(err);
                        }
                    }
                    else if (console[name]) {
                        _each(args, function (x) {
                            console[name](x);
                        });
                    }
                }
            }]));
        };
    };
    async.log = _console_fn('log');
    async.dir = _console_fn('dir');
    /*async.info = _console_fn('info');
    async.warn = _console_fn('warn');
    async.error = _console_fn('error');*/

    async.memoize = function (fn, hasher) {
        var memo = {};
        var queues = {};
        hasher = hasher || function (x) {
            return x;
        };
        var memoized = function () {
            var args = Array.prototype.slice.call(arguments);
            var callback = args.pop();
            var key = hasher.apply(null, args);
            if (key in memo) {
                callback.apply(null, memo[key]);
            }
            else if (key in queues) {
                queues[key].push(callback);
            }
            else {
                queues[key] = [callback];
                fn.apply(null, args.concat([function () {
                    memo[key] = arguments;
                    var q = queues[key];
                    delete queues[key];
                    for (var i = 0, l = q.length; i < l; i++) {
                        q[i].apply(null, arguments);
                    }
                }]));
            }
        };
        memoized.memo = memo;
        memoized.unmemoized = fn;
        return memoized;
    };
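
    // Editor's note: an illustrative usage sketch, not part of the original
    // source. The first call computes; later calls with the same hashed
    // arguments are answered from the memo, and concurrent callers for the
    // same key are queued rather than recomputed:
    //
    //     var slow = function (name, callback) {
    //         setTimeout(function () { callback(null, 'hi ' + name); }, 100);
    //     };
    //     var fast = async.memoize(slow);
    //     fast('bob', function (err, greeting) {
    //         fast('bob', function (err, cached) {
    //             console.log(greeting === cached); // true, served from memo
    //         });
    //     });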

    async.unmemoize = function (fn) {
        return function () {
            return (fn.unmemoized || fn).apply(null, arguments);
        };
    };

    async.times = function (count, iterator, callback) {
        var counter = [];
        for (var i = 0; i < count; i++) {
            counter.push(i);
        }
        return async.map(counter, iterator, callback);
    };

    async.timesSeries = function (count, iterator, callback) {
        var counter = [];
        for (var i = 0; i < count; i++) {
            counter.push(i);
        }
        return async.mapSeries(counter, iterator, callback);
    };

    async.compose = function (/* functions... */) {
        var fns = Array.prototype.reverse.call(arguments);
        return function () {
            var that = this;
            var args = Array.prototype.slice.call(arguments);
            var callback = args.pop();
            async.reduce(fns, args, function (newargs, fn, cb) {
                fn.apply(that, newargs.concat([function () {
                    var err = arguments[0];
                    var nextargs = Array.prototype.slice.call(arguments, 1);
                    cb(err, nextargs);
                }]))
            },
            function (err, results) {
                callback.apply(that, [err].concat(results));
            });
        };
    };
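
    // Editor's note: an illustrative usage sketch, not part of the original
    // source. compose(f, g) yields a function that applies g first, then f,
    // like mathematical composition:
    //
    //     var add1 = function (n, callback) { callback(null, n + 1); };
    //     var mul3 = function (n, callback) { callback(null, n * 3); };
    //     var add1ThenMul3 = async.compose(mul3, add1);
    //     add1ThenMul3(4, function (err, result) {
    //         console.log(result); // 15, i.e. (4 + 1) * 3
    //     });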

    var _applyEach = function (eachfn, fns /*args...*/) {
        var go = function () {
            var that = this;
            var args = Array.prototype.slice.call(arguments);
            var callback = args.pop();
            return eachfn(fns, function (fn, cb) {
                fn.apply(that, args.concat([cb]));
            },
            callback);
        };
        if (arguments.length > 2) {
            var args = Array.prototype.slice.call(arguments, 2);
            return go.apply(this, args);
        }
        else {
            return go;
        }
    };
    async.applyEach = doParallel(_applyEach);
    async.applyEachSeries = doSeries(_applyEach);

    async.forever = function (fn, callback) {
        function next(err) {
            if (err) {
                if (callback) {
                    return callback(err);
                }
                throw err;
            }
            fn(next);
        }
        next();
    };

    // AMD / RequireJS
    if (typeof define !== 'undefined' && define.amd) {
        define([], function () {
            return async;
        });
    }
    // Node.js
    else if (typeof module !== 'undefined' && module.exports) {
        module.exports = async;
    }
    // included directly via <script> tag
    else {
        root.async = async;
    }

}());
30
resources/app/node_modules/nedb/node_modules/async/package.json
generated
vendored
Normal file
30
resources/app/node_modules/nedb/node_modules/async/package.json
generated
vendored
Normal file
@@ -0,0 +1,30 @@
{
  "name": "async",
  "description": "Higher-order functions and common patterns for asynchronous code",
  "main": "./lib/async",
  "author": "Caolan McMahon",
  "version": "0.2.10",
  "repository": {
    "type": "git",
    "url": "https://github.com/caolan/async.git"
  },
  "licenses": [
    {
      "type": "MIT",
      "url": "https://github.com/caolan/async/raw/master/LICENSE"
    }
  ],
  "devDependencies": {
    "nodeunit": ">0.0.0",
    "uglify-js": "1.2.x",
    "nodelint": ">0.0.0"
  },
  "jam": {
    "main": "lib/async.js",
    "include": [
      "lib/async.js",
      "README.md",
      "LICENSE"
    ]
  }
}
21
resources/app/node_modules/nedb/node_modules/mkdirp/LICENSE
generated
vendored
Normal file
21
resources/app/node_modules/nedb/node_modules/mkdirp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
Copyright 2010 James Halliday (mail@substack.net)

This project is free software released under the MIT/X11 license:

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
33
resources/app/node_modules/nedb/node_modules/mkdirp/bin/cmd.js
generated
vendored
Executable file
33
resources/app/node_modules/nedb/node_modules/mkdirp/bin/cmd.js
generated
vendored
Executable file
@@ -0,0 +1,33 @@
#!/usr/bin/env node

var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');

var argv = minimist(process.argv.slice(2), {
    alias: { m: 'mode', h: 'help' },
    string: [ 'mode' ]
});
if (argv.help) {
    fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
    return;
}

var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;

(function next () {
    if (paths.length === 0) return;
    var p = paths.shift();

    if (mode === undefined) mkdirp(p, cb)
    else mkdirp(p, mode, cb)

    function cb (err) {
        if (err) {
            console.error(err.message);
            process.exit(1);
        }
        else next();
    }
})();
12
resources/app/node_modules/nedb/node_modules/mkdirp/bin/usage.txt
generated
vendored
Normal file
12
resources/app/node_modules/nedb/node_modules/mkdirp/bin/usage.txt
generated
vendored
Normal file
@@ -0,0 +1,12 @@
usage: mkdirp [DIR1,DIR2..] {OPTIONS}

  Create each supplied directory including any necessary parent directories that
  don't yet exist.

  If the directory already exists, do nothing.

OPTIONS are:

  -m, --mode   If a directory needs to be created, set the mode as an octal
               permission string.

102
resources/app/node_modules/nedb/node_modules/mkdirp/index.js
generated
vendored
Normal file
102
resources/app/node_modules/nedb/node_modules/mkdirp/index.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
var path = require('path');
var fs = require('fs');
var _0777 = parseInt('0777', 8);

module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;

function mkdirP (p, opts, f, made) {
    if (typeof opts === 'function') {
        f = opts;
        opts = {};
    }
    else if (!opts || typeof opts !== 'object') {
        opts = { mode: opts };
    }

    var mode = opts.mode;
    var xfs = opts.fs || fs;

    if (mode === undefined) {
        mode = _0777
    }
    if (!made) made = null;

    var cb = f || /* istanbul ignore next */ function () {};
    p = path.resolve(p);

    xfs.mkdir(p, mode, function (er) {
        if (!er) {
            made = made || p;
            return cb(null, made);
        }
        switch (er.code) {
            case 'ENOENT':
                /* istanbul ignore if */
                if (path.dirname(p) === p) return cb(er);
                mkdirP(path.dirname(p), opts, function (er, made) {
                    /* istanbul ignore if */
                    if (er) cb(er, made);
                    else mkdirP(p, opts, cb, made);
                });
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                xfs.stat(p, function (er2, stat) {
                    // if the stat fails, then that's super weird.
                    // let the original error be the failure reason.
                    if (er2 || !stat.isDirectory()) cb(er, made)
                    else cb(null, made);
                });
                break;
        }
    });
}

mkdirP.sync = function sync (p, opts, made) {
    if (!opts || typeof opts !== 'object') {
        opts = { mode: opts };
    }

    var mode = opts.mode;
    var xfs = opts.fs || fs;

    if (mode === undefined) {
        mode = _0777
    }
    if (!made) made = null;

    p = path.resolve(p);

    try {
        xfs.mkdirSync(p, mode);
        made = made || p;
    }
    catch (err0) {
        switch (err0.code) {
            case 'ENOENT' :
                made = sync(path.dirname(p), opts, made);
                sync(p, opts, made);
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                var stat;
                try {
                    stat = xfs.statSync(p);
                }
                catch (err1) /* istanbul ignore next */ {
                    throw err0;
                }
                /* istanbul ignore if */
                if (!stat.isDirectory()) throw err0;
                break;
        }
    }

    return made;
};
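Editor's note: a minimal usage sketch for the module above, not part of the
vendored file; the paths and mode shown are illustrative only.

    var mkdirp = require('mkdirp');

    // Asynchronous: creates every missing parent, like `mkdir -p`.
    // `made` is the first directory that had to be created.
    mkdirp('/tmp/foo/bar/baz', function (err, made) {
        if (err) console.error(err);
        else console.log('first directory created: ' + made);
    });

    // Synchronous variant with an explicit octal mode.
    mkdirp.sync('/tmp/a/b/c', parseInt('0755', 8));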
26
resources/app/node_modules/nedb/node_modules/mkdirp/package.json
generated
vendored
Normal file
26
resources/app/node_modules/nedb/node_modules/mkdirp/package.json
generated
vendored
Normal file
@@ -0,0 +1,26 @@
{
  "name": "mkdirp",
  "description": "Recursively mkdir, like `mkdir -p`",
  "version": "0.5.6",
  "publishConfig": {
    "tag": "legacy"
  },
  "author": "James Halliday <mail@substack.net> (http://substack.net)",
  "main": "index.js",
  "repository": {
    "type": "git",
    "url": "https://github.com/substack/node-mkdirp.git"
  },
  "dependencies": {
    "minimist": "^1.2.6"
  },
  "devDependencies": {
    "tap": "^16.0.1"
  },
  "bin": "bin/cmd.js",
  "license": "MIT",
  "files": [
    "bin",
    "index.js"
  ]
}
35
resources/app/node_modules/nedb/package.json
generated
vendored
Normal file
35
resources/app/node_modules/nedb/package.json
generated
vendored
Normal file
@@ -0,0 +1,35 @@
{
  "name": "nedb",
  "version": "1.8.0",
  "author": {
    "name": "Louis Chatriot",
    "email": "louis.chatriot@gmail.com"
  },
  "description": "File-based embedded data store for node.js",
  "homepage": "https://github.com/louischatriot/nedb",
  "repository": {
    "type": "git",
    "url": "git@github.com:louischatriot/nedb.git"
  },
  "dependencies": {
    "async": "0.2.10",
    "binary-search-tree": "0.2.5",
    "localforage": "^1.3.0",
    "mkdirp": "~0.5.1",
    "underscore": "~1.4.4"
  },
  "devDependencies": {
    "chai": "^3.2.0",
    "mocha": "1.4.x",
    "request": "2.9.x",
    "sinon": "1.3.x",
    "exec-time": "0.0.2",
    "commander": "1.1.1"
  },
  "main": "index",
  "browser": {
    "./lib/customUtils.js": "./browser-version/browser-specific/lib/customUtils.js",
    "./lib/storage.js": "./browser-version/browser-specific/lib/storage.js"
  },
  "license": "SEE LICENSE IN LICENSE"
}
123
resources/app/node_modules/nedb/test_lac/loadAndCrash.test.js
generated
vendored
Executable file
123
resources/app/node_modules/nedb/test_lac/loadAndCrash.test.js
generated
vendored
Executable file
@@ -0,0 +1,123 @@
/**
 * Load and modify part of fs to ensure writeFile will crash after writing 5000 bytes
 */
var fs = require('fs');

// DEBUG mirrors the flag from the Node core fs module these helpers were
// copied from; defined here so rethrow() can reference it (clarifying addition).
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);

function rethrow() {
  // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
  // is fairly slow to generate.
  if (DEBUG) {
    var backtrace = new Error();
    return function(err) {
      if (err) {
        backtrace.stack = err.name + ': ' + err.message +
                          backtrace.stack.substr(backtrace.name.length);
        throw backtrace;
      }
    };
  }

  return function(err) {
    if (err) {
      throw err;   // Forgot a callback but don't know where? Use NODE_DEBUG=fs
    }
  };
}

function maybeCallback(cb) {
  return typeof cb === 'function' ? cb : rethrow();
}

function isFd(path) {
  return (path >>> 0) === path;
}

function assertEncoding(encoding) {
  if (encoding && !Buffer.isEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}

var onePassDone = false;
function writeAll(fd, isUserFd, buffer, offset, length, position, callback_) {
  var callback = maybeCallback(arguments[arguments.length - 1]);

  if (onePassDone) { process.exit(1); }   // Crash on purpose before rewrite done
  var l = Math.min(5000, length);   // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash

  // write(fd, buffer, offset, length, position, callback)
  fs.write(fd, buffer, offset, l, position, function(writeErr, written) {
    if (writeErr) {
      if (isUserFd) {
        if (callback) callback(writeErr);
      } else {
        fs.close(fd, function() {
          if (callback) callback(writeErr);
        });
      }
    } else {
      onePassDone = true;
      if (written === length) {
        if (isUserFd) {
          if (callback) callback(null);
        } else {
          fs.close(fd, callback);
        }
      } else {
        offset += written;
        length -= written;
        if (position !== null) {
          position += written;
        }
        writeAll(fd, isUserFd, buffer, offset, length, position, callback);
      }
    }
  });
}

fs.writeFile = function(path, data, options, callback_) {
  var callback = maybeCallback(arguments[arguments.length - 1]);

  if (!options || typeof options === 'function') {
    options = { encoding: 'utf8', mode: 438, flag: 'w' };   // Mode 438 == 0o666 (compatibility with older Node releases)
  } else if (typeof options === 'string') {
    options = { encoding: options, mode: 438, flag: 'w' };   // Mode 438 == 0o666 (compatibility with older Node releases)
  } else if (typeof options !== 'object') {
    throwOptionsError(options);   // as in the Node core original; never reached by this test
  }

  assertEncoding(options.encoding);

  var flag = options.flag || 'w';

  if (isFd(path)) {
    writeFd(path, true);
    return;
  }

  fs.open(path, flag, options.mode, function(openErr, fd) {
    if (openErr) {
      if (callback) callback(openErr);
    } else {
      writeFd(fd, false);
    }
  });

  function writeFd(fd, isUserFd) {
    var buffer = (data instanceof Buffer) ? data : new Buffer('' + data,
        options.encoding || 'utf8');
    var position = /a/.test(flag) ? null : 0;

    writeAll(fd, isUserFd, buffer, 0, buffer.length, position, callback);
  }
};


// End of fs modification
var Nedb = require('../lib/datastore.js')
  , db = new Nedb({ filename: 'workspace/lac.db' })
  ;

db.loadDatabase();
67
resources/app/node_modules/nedb/test_lac/openFds.test.js
generated
vendored
Normal file
67
resources/app/node_modules/nedb/test_lac/openFds.test.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
var fs = require('fs')
  , child_process = require('child_process')
  , async = require('async')
  , Nedb = require('../lib/datastore')
  , db = new Nedb({ filename: './workspace/openfds.db', autoload: true })
  , N = 64   // Half the allowed file descriptors
  , i, fds
  ;

function multipleOpen (filename, N, callback) {
  async.whilst( function () { return i < N; }
  , function (cb) {
      fs.open(filename, 'r', function (err, fd) {
        i += 1;
        if (fd) { fds.push(fd); }
        return cb(err);
      });
    }
  , callback);
}

async.waterfall([
  // Check that ulimit has been set to the correct value
  function (cb) {
    i = 0;
    fds = [];
    multipleOpen('./test_lac/openFdsTestFile', 2 * N + 1, function (err) {
      if (!err) { console.log("No error occurred while opening a file too many times"); }
      fds.forEach(function (fd) { fs.closeSync(fd); });
      return cb();
    })
  }
, function (cb) {
    i = 0;
    fds = [];
    multipleOpen('./test_lac/openFdsTestFile2', N, function (err) {
      if (err) { console.log('An unexpected error occurred when opening file not too many times: ' + err); }
      fds.forEach(function (fd) { fs.closeSync(fd); });
      return cb();
    })
  }
  // Then actually test NeDB persistence
, function () {
    db.remove({}, { multi: true }, function (err) {
      if (err) { console.log(err); }
      db.insert({ hello: 'world' }, function (err) {
        if (err) { console.log(err); }

        i = 0;
        async.whilst( function () { return i < 2 * N + 1; }
        , function (cb) {
            db.persistence.persistCachedDatabase(function (err) {
              if (err) { return cb(err); }
              i += 1;
              return cb();
            });
          }
        , function (err) {
            if (err) { console.log("Got unexpected error during one persistence operation: " + err); }
          }
        );

      });
    });
  }
]);
2
resources/app/node_modules/nedb/test_lac/openFdsLaunch.sh
generated
vendored
Executable file
2
resources/app/node_modules/nedb/test_lac/openFdsLaunch.sh
generated
vendored
Executable file
@@ -0,0 +1,2 @@
ulimit -n 128
node ./test_lac/openFds.test.js
1
resources/app/node_modules/nedb/test_lac/openFdsTestFile
generated
vendored
Normal file
1
resources/app/node_modules/nedb/test_lac/openFdsTestFile
generated
vendored
Normal file
@@ -0,0 +1 @@
Random stuff
1
resources/app/node_modules/nedb/test_lac/openFdsTestFile2
generated
vendored
Normal file
1
resources/app/node_modules/nedb/test_lac/openFdsTestFile2
generated
vendored
Normal file
@@ -0,0 +1 @@
Some other random stuff