Initial commit
This commit is contained in:
96
node_modules/mongodb/lib/operations/add_user.js
generated
vendored
Normal file
96
node_modules/mongodb/lib/operations/add_user.js
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const CommandOperation = require('./command');
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const crypto = require('crypto');
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const toError = require('../utils').toError;
|
||||
|
||||
/**
 * Operation that adds a user to a database by issuing a `createUser` command.
 */
class AddUserOperation extends CommandOperation {
  constructor(db, username, password, options) {
    super(db, options);

    this.username = username;
    this.password = password;
  }

  _buildCommand() {
    const { db, username, password, options } = this;

    // Roles default to an empty list when none were provided.
    let roles = Array.isArray(options.roles) ? options.roles : [];

    // If not roles defined print deprecated message
    // TODO: handle deprecation properly
    if (roles.length === 0) {
      console.log('Creating a user without roles is deprecated in MongoDB >= 2.6');
    }

    // When the caller supplied no roles, pick a default based on the target db.
    if (!Array.isArray(options.roles)) {
      const targetsAdmin =
        db.databaseName.toLowerCase() === 'admin' || options.dbName === 'admin';
      roles = targetsAdmin ? ['root'] : ['dbOwner'];
    }

    // Servers at wire version 7+ digest the password server-side.
    const digestPassword = db.s.topology.lastIsMaster().maxWireVersion >= 7;

    let userPassword = password;

    if (!digestPassword) {
      // Older servers expect the client-side MD5 digest of `user:mongo:password`.
      const md5 = crypto.createHash('md5');
      md5.update(username + ':mongo:' + password);
      userPassword = md5.digest('hex');
    }

    // Assemble the createUser command document.
    const command = {
      createUser: username,
      customData: options.customData || {},
      roles: roles,
      digestPassword
    };

    // Only attach a password field when one was actually supplied.
    if (typeof password === 'string') {
      command.pwd = userPassword;
    }

    return command;
  }

  execute(callback) {
    const options = this.options;

    // digestPassword is computed internally; reject attempts to set it here.
    if (options.digestPassword != null) {
      return callback(
        toError(
          "The digestPassword option is not supported via add_user. Please use db.command('createUser', ...) instead for this option."
        )
      );
    }

    // Attempt to execute auth command
    super.execute((err, r) => handleCallback(callback, err, err ? null : r));
  }
}

defineAspects(AddUserOperation, Aspect.WRITE_OPERATION);

module.exports = AddUserOperation;
|
||||
62
node_modules/mongodb/lib/operations/admin_ops.js
generated
vendored
Normal file
62
node_modules/mongodb/lib/operations/admin_ops.js
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
'use strict';
|
||||
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const executeDbAdminCommand = require('./db_ops').executeDbAdminCommand;
|
||||
|
||||
/**
 * Get ReplicaSet status
 *
 * @param {Admin} admin The Admin instance to run against.
 * @param {Object} [options] Optional settings. See Admin.prototype.replSetGetStatus for a list of options.
 * @param {Admin~resultCallback} [callback] The command result callback.
 */
function replSetGetStatus(admin, options, callback) {
  const command = { replSetGetStatus: 1 };
  executeDbAdminCommand(admin.s.db, command, options, callback);
}
|
||||
|
||||
/**
 * Retrieve this db's server status.
 *
 * @param {Admin} admin The Admin instance to run against.
 * @param {Object} [options] Optional settings. See Admin.prototype.serverStatus for a list of options.
 * @param {Admin~resultCallback} [callback] The command result callback
 */
function serverStatus(admin, options, callback) {
  const command = { serverStatus: 1 };
  executeDbAdminCommand(admin.s.db, command, options, callback);
}
|
||||
|
||||
/**
 * Validate an existing collection
 *
 * @param {Admin} admin The Admin instance to run against.
 * @param {string} collectionName The name of the collection to validate.
 * @param {Object} [options] Optional settings. See Admin.prototype.validateCollection for a list of options.
 * @param {Admin~resultCallback} [callback] The command result callback.
 */
function validateCollection(admin, collectionName, options, callback) {
  const command = { validate: collectionName };

  // Copy every own option (except the session) onto the command document.
  for (const key of Object.keys(options)) {
    if (key !== 'session' && options.hasOwnProperty(key)) {
      command[key] = options[key];
    }
  }

  executeCommand(admin.s.db, command, options, (err, doc) => {
    if (err != null) return callback(err, null);

    if (doc.ok === 0) {
      return callback(new Error('Error with validate command'), null);
    }
    if (doc.result != null && doc.result.constructor !== String) {
      return callback(new Error('Error with validation data'), null);
    }

    // Treat an exception/corruption marker in the report, or an explicit
    // valid=false flag, as an invalid collection.
    const looksCorrupt = doc.result != null && doc.result.match(/exception|corrupt/) != null;
    const markedInvalid = doc.valid != null && !doc.valid;
    if (looksCorrupt || markedInvalid) {
      return callback(new Error('Error: invalid collection ' + collectionName), null);
    }

    return callback(null, doc);
  });
}

module.exports = { replSetGetStatus, serverStatus, validateCollection };
|
||||
106
node_modules/mongodb/lib/operations/aggregate.js
generated
vendored
Normal file
106
node_modules/mongodb/lib/operations/aggregate.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const MongoError = require('../core').MongoError;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
|
||||
// Sentinel value used as the `aggregate` command target for database-level
// aggregations (collection-level aggregations use the collection name).
const DB_AGGREGATE_COLLECTION = 1;
// First wire version at which readConcern may accompany a $out stage.
const MIN_WIRE_VERSION_$OUT_READ_CONCERN_SUPPORT = 8;

/**
 * Operation implementing the `aggregate` command against a collection or a
 * whole database. Requests the full server response so the caller can build
 * a cursor from the reply.
 */
class AggregateOperation extends CommandOperationV2 {
  constructor(parent, pipeline, options) {
    // fullResponse: keep the raw reply (cursor document) instead of `result`.
    super(parent, options, { fullResponse: true });

    // Collection-level aggregations target the collection name; otherwise
    // fall back to the db-level sentinel value.
    this.target =
      parent.s.namespace && parent.s.namespace.collection
        ? parent.s.namespace.collection
        : DB_AGGREGATE_COLLECTION;

    this.pipeline = pipeline;

    // determine if we have a write stage, override read preference if so
    this.hasWriteStage = false;
    if (typeof options.out === 'string') {
      // Legacy `out` option: appended to the pipeline as a trailing $out stage.
      this.pipeline = this.pipeline.concat({ $out: options.out });
      this.hasWriteStage = true;
    } else if (pipeline.length > 0) {
      const finalStage = pipeline[pipeline.length - 1];
      if (finalStage.$out || finalStage.$merge) {
        this.hasWriteStage = true;
      }
    }

    // Pipelines that write must run against the primary.
    if (this.hasWriteStage) {
      this.readPreference = ReadPreference.primary;
    }

    if (options.explain && (this.readConcern || this.writeConcern)) {
      throw new MongoError(
        '"explain" cannot be used on an aggregate call with readConcern/writeConcern'
      );
    }

    if (options.cursor != null && typeof options.cursor !== 'object') {
      throw new MongoError('cursor options must be an object');
    }
  }

  // Write-stage aggregations must not be retried as reads.
  get canRetryRead() {
    return !this.hasWriteStage;
  }

  // Append a stage to the pipeline (used by the fluent aggregation cursor).
  addToPipeline(stage) {
    this.pipeline.push(stage);
  }

  execute(server, callback) {
    const options = this.options;
    const serverWireVersion = maxWireVersion(server);
    const command = { aggregate: this.target, pipeline: this.pipeline };

    // Servers below wire version 8 reject readConcern combined with a write
    // stage, so drop it in that case.
    if (this.hasWriteStage && serverWireVersion < MIN_WIRE_VERSION_$OUT_READ_CONCERN_SUPPORT) {
      this.readConcern = null;
    }

    // writeConcern on aggregate requires wire version 5+ and a write stage.
    if (serverWireVersion >= 5) {
      if (this.hasWriteStage && this.writeConcern) {
        Object.assign(command, { writeConcern: this.writeConcern });
      }
    }

    if (options.bypassDocumentValidation === true) {
      command.bypassDocumentValidation = options.bypassDocumentValidation;
    }

    if (typeof options.allowDiskUse === 'boolean') {
      command.allowDiskUse = options.allowDiskUse;
    }

    if (options.hint) {
      command.hint = options.hint;
    }

    if (options.explain) {
      // NOTE(review): explain forces `full` off so the explain output is
      // returned directly rather than wrapped in the full response.
      options.full = false;
      command.explain = options.explain;
    }

    // aggregate always returns a cursor document; batchSize is only honoured
    // for non-write pipelines.
    command.cursor = options.cursor || {};
    if (options.batchSize && !this.hasWriteStage) {
      command.cursor.batchSize = options.batchSize;
    }

    super.executeCommand(server, command, callback);
  }
}

defineAspects(AggregateOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = AggregateOperation;
|
||||
104
node_modules/mongodb/lib/operations/bulk_write.js
generated
vendored
Normal file
104
node_modules/mongodb/lib/operations/bulk_write.js
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
'use strict';
|
||||
|
||||
const applyRetryableWrites = require('../utils').applyRetryableWrites;
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
|
||||
/**
 * Operation that maps an array of mixed write operations onto the bulk API
 * and normalizes the bulk result into the CRUD-spec result shape
 * (insertedCount, matchedCount, upsertedIds, ...).
 */
class BulkWriteOperation extends OperationBase {
  constructor(collection, operations, options) {
    super(options);

    this.collection = collection;
    this.operations = operations;
  }

  execute(callback) {
    const coll = this.collection;
    const operations = this.operations;
    let options = this.options;

    // Add ignoreUndefined (options are copied first so the caller's object
    // is not mutated)
    if (coll.s.options.ignoreUndefined) {
      options = Object.assign({}, options);
      options.ignoreUndefined = coll.s.options.ignoreUndefined;
    }

    // Create the bulk operation; ordered is the default when unspecified
    const bulk =
      options.ordered === true || options.ordered == null
        ? coll.initializeOrderedBulkOp(options)
        : coll.initializeUnorderedBulkOp(options);

    // Do we have a collation
    let collation = false;

    // for each op go through and add to the bulk
    try {
      for (let i = 0; i < operations.length; i++) {
        // Get the operation type (the single top-level key, e.g. insertOne)
        const key = Object.keys(operations[i])[0];
        // Check if we have a collation
        if (operations[i][key].collation) {
          collation = true;
        }

        // Pass to the raw bulk
        bulk.raw(operations[i]);
      }
    } catch (err) {
      // bulk.raw throws on malformed operations; surface via the callback
      return callback(err, null);
    }

    // Final options for retryable writes and write concern
    let finalOptions = Object.assign({}, options);
    finalOptions = applyRetryableWrites(finalOptions, coll.s.db);
    finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options);

    const writeCon = finalOptions.writeConcern ? finalOptions.writeConcern : {};
    const capabilities = coll.s.topology.capabilities();

    // Did the user pass in a collation, check if our write server supports it
    if (collation && capabilities && !capabilities.commandsTakeCollation) {
      return callback(new MongoError('server/primary/mongos does not support collation'));
    }

    // Execute the bulk
    bulk.execute(writeCon, finalOptions, (err, r) => {
      // We have connection level error
      if (!r && err) {
        return callback(err, null);
      }

      // Map the legacy bulk counters onto the CRUD-spec field names
      r.insertedCount = r.nInserted;
      r.matchedCount = r.nMatched;
      r.modifiedCount = r.nModified || 0;
      r.deletedCount = r.nRemoved;
      r.upsertedCount = r.getUpsertedIds().length;
      r.upsertedIds = {};
      r.insertedIds = {};

      // Update the n
      r.n = r.insertedCount;

      // Inserted documents
      const inserted = r.getInsertedIds();
      // Map inserted ids keyed by their position in the operations array
      for (let i = 0; i < inserted.length; i++) {
        r.insertedIds[inserted[i].index] = inserted[i]._id;
      }

      // Upserted documents
      const upserted = r.getUpsertedIds();
      // Map upserted ids
      for (let i = 0; i < upserted.length; i++) {
        r.upsertedIds[upserted[i].index] = upserted[i]._id;
      }

      // Return the results
      callback(null, r);
    });
  }
}

module.exports = BulkWriteOperation;
|
||||
353
node_modules/mongodb/lib/operations/collection_ops.js
generated
vendored
Normal file
353
node_modules/mongodb/lib/operations/collection_ops.js
generated
vendored
Normal file
@@ -0,0 +1,353 @@
|
||||
'use strict';
|
||||
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const Code = require('../core').BSON.Code;
|
||||
const createIndexDb = require('./db_ops').createIndex;
|
||||
const decorateWithCollation = require('../utils').decorateWithCollation;
|
||||
const decorateWithReadConcern = require('../utils').decorateWithReadConcern;
|
||||
const ensureIndexDb = require('./db_ops').ensureIndex;
|
||||
const evaluate = require('./db_ops').evaluate;
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const indexInformationDb = require('./db_ops').indexInformation;
|
||||
const Long = require('../core').BSON.Long;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const insertDocuments = require('./common_functions').insertDocuments;
|
||||
const updateDocuments = require('./common_functions').updateDocuments;
|
||||
|
||||
/**
|
||||
* Group function helper
|
||||
* @ignore
|
||||
*/
|
||||
// var groupFunction = function () {
|
||||
// var c = db[ns].find(condition);
|
||||
// var map = new Map();
|
||||
// var reduce_function = reduce;
|
||||
//
|
||||
// while (c.hasNext()) {
|
||||
// var obj = c.next();
|
||||
// var key = {};
|
||||
//
|
||||
// for (var i = 0, len = keys.length; i < len; ++i) {
|
||||
// var k = keys[i];
|
||||
// key[k] = obj[k];
|
||||
// }
|
||||
//
|
||||
// var aggObj = map.get(key);
|
||||
//
|
||||
// if (aggObj == null) {
|
||||
// var newObj = Object.extend({}, key);
|
||||
// aggObj = Object.extend(newObj, initial);
|
||||
// map.put(key, aggObj);
|
||||
// }
|
||||
//
|
||||
// reduce_function(obj, aggObj);
|
||||
// }
|
||||
//
|
||||
// return { "result": map.values() };
|
||||
// }.toString();
|
||||
// Stringified form of the commented-out helper above. On the legacy
// (non-command) group path this source text is shipped to the server and run
// via eval; ` reduce;` inside it is replaced with the caller's reduce source.
const groupFunction =
  'function () {\nvar c = db[ns].find(condition);\nvar map = new Map();\nvar reduce_function = reduce;\n\nwhile (c.hasNext()) {\nvar obj = c.next();\nvar key = {};\n\nfor (var i = 0, len = keys.length; i < len; ++i) {\nvar k = keys[i];\nkey[k] = obj[k];\n}\n\nvar aggObj = map.get(key);\n\nif (aggObj == null) {\nvar newObj = Object.extend({}, key);\naggObj = Object.extend(newObj, initial);\nmap.put(key, aggObj);\n}\n\nreduce_function(obj, aggObj);\n}\n\nreturn { "result": map.values() };\n}';
|
||||
|
||||
/**
 * Create an index on the db and collection.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {object} [options] Optional settings. See Collection.prototype.createIndex for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function createIndex(coll, fieldOrSpec, options, callback) {
  // Delegate to the db-level implementation.
  const db = coll.s.db;
  createIndexDb(db, coll.collectionName, fieldOrSpec, options, callback);
}
|
||||
|
||||
/**
 * Create multiple indexes in the collection. This method is only supported for
 * MongoDB 2.6 or higher. Earlier version of MongoDB will throw a command not supported
 * error. Index specifications are defined at http://docs.mongodb.org/manual/reference/command/createIndexes/.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {array} indexSpecs An array of index specifications to be created
 * @param {Object} [options] Optional settings. See Collection.prototype.createIndexes for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function createIndexes(coll, indexSpecs, options, callback) {
  const capabilities = coll.s.topology.capabilities();

  for (const spec of indexSpecs) {
    // Did the user pass in a collation, check if our write server supports it.
    // FIX: previously this check only ran for specs without a `name`, so a
    // *named* spec with a collation bypassed the client-side capability check
    // and failed server-side instead. It now applies to every spec.
    if (spec.collation && capabilities && !capabilities.commandsTakeCollation) {
      return callback(new MongoError('server/primary/mongos does not support collation'));
    }

    // Ensure we generate the correct name if the parameter is not set:
    // join `<field>_<direction>` pairs, e.g. { a: 1, b: -1 } -> "a_1_b_-1".
    if (spec.name == null) {
      const keys = [];
      for (const name in spec.key) {
        keys.push(`${name}_${spec.key[name]}`);
      }

      // Set the name
      spec.name = keys.join('_');
    }
  }

  // Index builds must run against the primary.
  options = Object.assign({}, options, { readPreference: ReadPreference.PRIMARY });

  // Execute the createIndexes command
  executeCommand(
    coll.s.db,
    {
      createIndexes: coll.collectionName,
      indexes: indexSpecs
    },
    options,
    callback
  );
}
|
||||
|
||||
/**
 * Ensure that an index exists. If the index does not exist, this function creates it.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {object} [options] Optional settings. See Collection.prototype.ensureIndex for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function ensureIndex(coll, fieldOrSpec, options, callback) {
  // Delegate to the db-level implementation.
  const db = coll.s.db;
  ensureIndexDb(db, coll.collectionName, fieldOrSpec, options, callback);
}
|
||||
|
||||
/**
 * Run a group command across a collection.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {(object|array|function|code)} keys An object, array or function expressing the keys to group by.
 * @param {object} condition An optional condition that must be true for a row to be considered.
 * @param {object} initial Initial value of the aggregation counter object.
 * @param {(function|Code)} reduce The reduce function aggregates (reduces) the objects iterated
 * @param {(function|Code)} finalize An optional function to be run on each item in the result set just before the item is returned.
 * @param {boolean} command Specify if you wish to run using the internal group command or using eval, default is true.
 * @param {object} [options] Optional settings. See Collection.prototype.group for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 * @deprecated MongoDB 3.6 or higher will no longer support the group command. We recommend rewriting using the aggregation framework.
 */
function group(coll, keys, condition, initial, reduce, finalize, command, options, callback) {
  // Execute using the group command
  if (command) {
    // Wrap the reduce function as BSON Code unless it already is one
    const reduceFunction = reduce && reduce._bsontype === 'Code' ? reduce : new Code(reduce);

    const selector = {
      group: {
        ns: coll.collectionName,
        $reduce: reduceFunction,
        cond: condition,
        initial: initial,
        out: 'inline'
      }
    };

    // if finalize is defined
    if (finalize != null) selector.group['finalize'] = finalize;
    // Set up group selector: a function becomes $keyf, otherwise the key
    // list is turned into a { key: 1, ... } projection document
    if ('function' === typeof keys || (keys && keys._bsontype === 'Code')) {
      selector.group.$keyf = keys && keys._bsontype === 'Code' ? keys : new Code(keys);
    } else {
      const hash = {};
      keys.forEach(key => {
        hash[key] = 1;
      });
      selector.group.key = hash;
    }

    // Copy options so the caller's object is not mutated below
    options = Object.assign({}, options);
    // Ensure we have the right read preference inheritance
    options.readPreference = ReadPreference.resolve(coll, options);

    // Do we have a readConcern specified
    decorateWithReadConcern(selector, coll, options);

    // Have we specified collation
    try {
      decorateWithCollation(selector, coll, options);
    } catch (err) {
      return callback(err, null);
    }

    // Execute command; the grouped rows come back in `retval`
    executeCommand(coll.s.db, selector, options, (err, result) => {
      if (err) return handleCallback(callback, err, null);
      handleCallback(callback, null, result.retval);
    });
  } else {
    // Legacy path: run the group via server-side eval.
    // Create execution scope (reuse the reduce Code's scope when present)
    const scope = reduce != null && reduce._bsontype === 'Code' ? reduce.scope : {};

    scope.ns = coll.collectionName;
    scope.keys = keys;
    scope.condition = condition;
    scope.initial = initial;

    // Pass in the function text to execute within mongodb: splice the
    // caller's reduce source into the canned groupFunction template.
    const groupfn = groupFunction.replace(/ reduce;/, reduce.toString() + ';');

    evaluate(coll.s.db, new Code(groupfn, scope), null, options, (err, results) => {
      if (err) return handleCallback(callback, err, null);
      handleCallback(callback, null, results.result || results);
    });
  }
}
|
||||
|
||||
/**
 * Retrieve all the indexes on the collection.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {Object} [options] Optional settings. See Collection.prototype.indexes for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function indexes(coll, options, callback) {
  // Request full index documents unless the caller overrides `full`.
  const opts = Object.assign({}, { full: true }, options);
  indexInformationDb(coll.s.db, coll.collectionName, opts, callback);
}
|
||||
|
||||
/**
 * Check if one or more indexes exist on the collection. This fails on the first index that doesn't exist.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {(string|array)} indexes One or more index names to check.
 * @param {Object} [options] Optional settings. See Collection.prototype.indexExists for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function indexExists(coll, indexes, options, callback) {
  indexInformation(coll, options, (err, info) => {
    // If we have an error return
    if (err != null) return handleCallback(callback, err, null);

    // Single index name: a simple lookup suffices.
    if (!Array.isArray(indexes)) {
      return handleCallback(callback, null, info[indexes] != null);
    }

    // Fail on the first name in the list that is missing.
    for (const name of indexes) {
      if (info[name] == null) {
        return handleCallback(callback, null, false);
      }
    }

    // All keys found return true
    return handleCallback(callback, null, true);
  });
}
|
||||
|
||||
/**
 * Retrieve this collection's index info.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {object} [options] Optional settings. See Collection.prototype.indexInformation for a list of options.
 * @param {Collection~resultCallback} [callback] The command result callback
 */
function indexInformation(coll, options, callback) {
  // Delegate to the db-level implementation.
  const db = coll.s.db;
  indexInformationDb(db, coll.collectionName, options, callback);
}
|
||||
|
||||
/**
 * Return N parallel cursors for a collection to allow parallel reading of the entire collection. There are
 * no ordering guarantees for returned results.
 *
 * @method
 * @param {Collection} coll A Collection instance.
 * @param {object} [options] Optional settings. See Collection.prototype.parallelCollectionScan for a list of options.
 * @param {Collection~parallelCollectionScanCallback} [callback] The command result callback
 */
function parallelCollectionScan(coll, options, callback) {
  // Create command object
  const commandObject = {
    parallelCollectionScan: coll.collectionName,
    numCursors: options.numCursors
  };

  // Do we have a readConcern specified
  decorateWithReadConcern(commandObject, coll, options);

  // Store the raw value and strip it from options so the command itself is
  // not executed in raw mode. NOTE(review): this mutates the caller's
  // options object; `raw` is restored onto options further below.
  const raw = options.raw;
  delete options['raw'];

  // Execute the command
  executeCommand(coll.s.db, commandObject, options, (err, result) => {
    if (err) return handleCallback(callback, err, null);
    if (result == null)
      return handleCallback(
        callback,
        new Error('no result returned for parallelCollectionScan'),
        null
      );

    // Cursor construction manages its own session
    options = Object.assign({ explicitlyIgnoreSession: true }, options);

    const cursors = [];
    // Add the raw back to the option
    if (raw) options.raw = raw;
    // Create command cursors for each item
    for (let i = 0; i < result.cursors.length; i++) {
      const rawId = result.cursors[i].cursor.id;
      // Convert cursorId to Long if needed
      const cursorId = typeof rawId === 'number' ? Long.fromNumber(rawId) : rawId;
      // Add a command cursor
      cursors.push(coll.s.topology.cursor(coll.namespace, cursorId, options));
    }

    handleCallback(callback, null, cursors);
  });
}
|
||||
|
||||
/**
|
||||
* Save a document.
|
||||
*
|
||||
* @method
|
||||
* @param {Collection} a Collection instance.
|
||||
* @param {object} doc Document to save
|
||||
* @param {object} [options] Optional settings. See Collection.prototype.save for a list of options.
|
||||
* @param {Collection~writeOpCallback} [callback] The command result callback
|
||||
* @deprecated use insertOne, insertMany, updateOne or updateMany
|
||||
*/
|
||||
function save(coll, doc, options, callback) {
|
||||
// Get the write concern options
|
||||
const finalOptions = applyWriteConcern(
|
||||
Object.assign({}, options),
|
||||
{ db: coll.s.db, collection: coll },
|
||||
options
|
||||
);
|
||||
// Establish if we need to perform an insert or update
|
||||
if (doc._id != null) {
|
||||
finalOptions.upsert = true;
|
||||
return updateDocuments(coll, { _id: doc._id }, doc, finalOptions, callback);
|
||||
}
|
||||
|
||||
// Insert the document
|
||||
insertDocuments(coll, [doc], finalOptions, (err, result) => {
|
||||
if (callback == null) return;
|
||||
if (doc == null) return handleCallback(callback, null, null);
|
||||
if (err) return handleCallback(callback, err, null);
|
||||
handleCallback(callback, null, result);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createIndex,
|
||||
createIndexes,
|
||||
ensureIndex,
|
||||
group,
|
||||
indexes,
|
||||
indexExists,
|
||||
indexInformation,
|
||||
parallelCollectionScan,
|
||||
save
|
||||
};
|
||||
55
node_modules/mongodb/lib/operations/collections.js
generated
vendored
Normal file
55
node_modules/mongodb/lib/operations/collections.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
|
||||
// Lazily resolved to avoid a circular dependency with ../collection.
let collection;
function loadCollection() {
  collection = collection || require('../collection');
  return collection;
}
|
||||
|
||||
/**
 * Operation that lists a database's collections and materializes each one
 * as a Collection instance.
 */
class CollectionsOperation extends OperationBase {
  constructor(db, options) {
    super(options);

    this.db = db;
  }

  execute(callback) {
    const db = this.db;

    const Collection = loadCollection();

    // Only names are needed from listCollections.
    const listOptions = Object.assign({}, this.options, { nameOnly: true });

    // Let's get the collection names
    db.listCollections({}, listOptions).toArray((err, documents) => {
      if (err != null) return handleCallback(callback, err, null);

      // Filter collections removing any illegal ones (names containing '$')
      const legalDocs = documents.filter(doc => doc.name.indexOf('$') === -1);

      // Wrap every remaining name in a Collection object.
      const collections = legalDocs.map(
        d =>
          new Collection(db, db.s.topology, db.databaseName, d.name, db.s.pkFactory, db.s.options)
      );

      // Return the collection objects
      handleCallback(callback, null, collections);
    });
  }
}

module.exports = CollectionsOperation;
|
||||
119
node_modules/mongodb/lib/operations/command.js
generated
vendored
Normal file
119
node_modules/mongodb/lib/operations/command.js
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const debugOptions = require('../utils').debugOptions;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const MongoDBNamespace = require('../utils').MongoDBNamespace;
|
||||
|
||||
// Whitelist of option names echoed in debug log lines when a command is
// executed; all other options are elided to keep the log output readable.
const debugFields = [
  'authSource',
  'w',
  'wtimeout',
  'j',
  'native_parser',
  'forceServerObjectId',
  'serializeFunctions',
  'raw',
  'promoteLongs',
  'promoteValues',
  'promoteBuffers',
  'bufferMaxEntries',
  'numberOfRetries',
  'retryMiliSeconds',
  'readPreference',
  'pkFactory',
  'parentDb',
  'promiseLibrary',
  'noListener'
];
|
||||
|
||||
/**
 * Base class for operations that run a database command against `<db>.$cmd`.
 * Subclasses either pass a fixed command document to the constructor or
 * override `_buildCommand()` to construct one at execution time.
 */
class CommandOperation extends OperationBase {
  constructor(db, options, collection, command) {
    super(options);

    // Read operations resolve a read preference from the collection (or db);
    // write operations instead resolve a write concern and force the primary.
    if (!this.hasAspect(Aspect.WRITE_OPERATION)) {
      if (collection != null) {
        this.options.readPreference = ReadPreference.resolve(collection, options);
      } else {
        this.options.readPreference = ReadPreference.resolve(db, options);
      }
    } else {
      if (collection != null) {
        applyWriteConcern(this.options, { db, coll: collection }, this.options);
      } else {
        applyWriteConcern(this.options, { db }, this.options);
      }
      this.options.readPreference = ReadPreference.primary;
    }

    this.db = db;

    // Optional fixed command document (see _buildCommand below).
    if (command != null) {
      this.command = command;
    }

    if (collection != null) {
      this.collection = collection;
    }
  }

  // Default implementation: return the fixed command passed to the
  // constructor, if any. Subclasses override this to build a command.
  _buildCommand() {
    if (this.command != null) {
      return this.command;
    }
  }

  execute(callback) {
    const db = this.db;
    const options = Object.assign({}, this.options);

    // Did the user destroy the topology
    if (db.serverConfig && db.serverConfig.isDestroyed()) {
      return callback(new MongoError('topology was destroyed'));
    }

    // _buildCommand may throw (e.g. invalid options); surface via callback.
    let command;
    try {
      command = this._buildCommand();
    } catch (e) {
      return callback(e);
    }

    // Get the db name we are executing against
    const dbName = options.dbName || options.authdb || db.databaseName;

    // Attach the write concern for write operations — but not inside a
    // transaction, where the transaction owns the write concern.
    if (this.hasAspect(Aspect.WRITE_OPERATION)) {
      if (options.writeConcern && (!options.session || !options.session.inTransaction())) {
        command.writeConcern = options.writeConcern;
      }
    }

    // Debug information
    if (db.s.logger.isDebug()) {
      db.s.logger.debug(
        `executing command ${JSON.stringify(
          command
        )} against ${dbName}.$cmd with options [${JSON.stringify(
          debugOptions(debugFields, options)
        )}]`
      );
    }

    // Subclasses may pin a namespace; otherwise target <dbName>.$cmd.
    const namespace =
      this.namespace != null ? this.namespace : new MongoDBNamespace(dbName, '$cmd');

    // Execute command; `options.full` returns the raw reply instead of
    // just its `result` field.
    db.s.topology.command(namespace, command, options, (err, result) => {
      if (err) return handleCallback(callback, err);
      if (options.full) return handleCallback(callback, null, result);
      handleCallback(callback, null, result.result);
    });
  }
}

module.exports = CommandOperation;
|
||||
108
node_modules/mongodb/lib/operations/command_v2.js
generated
vendored
Normal file
108
node_modules/mongodb/lib/operations/command_v2.js
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const ReadConcern = require('../read_concern');
|
||||
const WriteConcern = require('../write_concern');
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const commandSupportsReadConcern = require('../core/sessions').commandSupportsReadConcern;
|
||||
const MongoError = require('../core/error').MongoError;
|
||||
|
||||
// Minimum wire version at which commands accept writeConcern and collation.
const SUPPORTS_WRITE_CONCERN_AND_COLLATION = 5;
|
||||
|
||||
/**
 * Base class for command operations in the "v2" operation framework.
 * Resolves read preference, read concern and write concern from the parent
 * (a Db or Collection) unless the operation carries the NO_INHERIT_OPTIONS
 * aspect, in which case only explicit options are consulted.
 */
class CommandOperationV2 extends OperationBase {
  /**
   * @param {Db|Collection} parent Object options are inherited from.
   * @param {object} [options] Operation options.
   * @param {object} [operationOptions] Framework-level options (e.g. `fullResponse`).
   */
  constructor(parent, options, operationOptions) {
    super(options);

    this.ns = parent.s.namespace.withCollection('$cmd');
    const propertyProvider = this.hasAspect(Aspect.NO_INHERIT_OPTIONS) ? undefined : parent;
    this.readPreference = ReadPreference.resolve(propertyProvider, this.options);
    this.readConcern = resolveReadConcern(propertyProvider, this.options);
    this.writeConcern = resolveWriteConcern(propertyProvider, this.options);
    this.explain = false;

    // NOTE(review): only the *presence* of a boolean is tested here, so
    // `fullResponse: false` also enables the full response — confirm intent.
    if (operationOptions && typeof operationOptions.fullResponse === 'boolean') {
      this.fullResponse = true;
    }

    // TODO: A lot of our code depends on having the read preference in the options. This should
    // go away, but also requires massive test rewrites.
    this.options.readPreference = this.readPreference;

    // TODO(NODE-2056): make logger another "inheritable" property
    if (parent.s.logger) {
      this.logger = parent.s.logger;
    } else if (parent.s.db && parent.s.db.logger) {
      this.logger = parent.s.db.logger;
    }
  }

  /**
   * Decorate `cmd` with read/write concern, collation, maxTimeMS and comment
   * as appropriate for the server's wire version, then run it.
   * @param {Server} server Server selected to run the command.
   * @param {object} cmd Command document (mutated in place).
   * @param {function} callback Invoked with (err, result).
   */
  executeCommand(server, cmd, callback) {
    // TODO: consider making this a non-enumerable property
    this.server = server;

    const options = this.options;
    const serverWireVersion = maxWireVersion(server);
    const inTransaction = this.session && this.session.inTransaction();

    // Read concern is omitted inside a transaction (the transaction owns it).
    if (this.readConcern && commandSupportsReadConcern(cmd) && !inTransaction) {
      Object.assign(cmd, { readConcern: this.readConcern });
    }

    // Collation requires wire version 5+; fail fast rather than letting the
    // server silently ignore it.
    if (options.collation && serverWireVersion < SUPPORTS_WRITE_CONCERN_AND_COLLATION) {
      callback(
        new MongoError(
          `Server ${server.name}, which reports wire version ${serverWireVersion}, does not support collation`
        )
      );
      return;
    }

    if (serverWireVersion >= SUPPORTS_WRITE_CONCERN_AND_COLLATION) {
      if (this.writeConcern && this.hasAspect(Aspect.WRITE_OPERATION)) {
        Object.assign(cmd, { writeConcern: this.writeConcern });
      }

      if (options.collation && typeof options.collation === 'object') {
        Object.assign(cmd, { collation: options.collation });
      }
    }

    if (typeof options.maxTimeMS === 'number') {
      cmd.maxTimeMS = options.maxTimeMS;
    }

    if (typeof options.comment === 'string') {
      cmd.comment = options.comment;
    }

    if (this.logger && this.logger.isDebug()) {
      this.logger.debug(`executing command ${JSON.stringify(cmd)} against ${this.ns}`);
    }

    server.command(this.ns.toString(), cmd, this.options, (err, result) => {
      if (err) {
        callback(err, null);
        return;
      }

      // fullResponse callers get the raw server reply.
      if (this.fullResponse) {
        callback(null, result);
        return;
      }

      callback(null, result.result);
    });
  }
}
|
||||
|
||||
/**
 * Determine the effective write concern: an explicitly supplied option wins,
 * otherwise fall back to the parent's (Db/Collection) write concern.
 */
function resolveWriteConcern(parent, options) {
  const explicit = WriteConcern.fromOptions(options);
  return explicit || (parent && parent.writeConcern);
}
|
||||
|
||||
/**
 * Determine the effective read concern: an explicitly supplied option wins,
 * otherwise fall back to the parent's (Db/Collection) read concern.
 */
function resolveReadConcern(parent, options) {
  const explicit = ReadConcern.fromOptions(options);
  return explicit || (parent && parent.readConcern);
}

module.exports = CommandOperationV2;
|
||||
412
node_modules/mongodb/lib/operations/common_functions.js
generated
vendored
Normal file
412
node_modules/mongodb/lib/operations/common_functions.js
generated
vendored
Normal file
@@ -0,0 +1,412 @@
|
||||
'use strict';
|
||||
|
||||
const applyRetryableWrites = require('../utils').applyRetryableWrites;
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const decorateWithCollation = require('../utils').decorateWithCollation;
|
||||
const decorateWithReadConcern = require('../utils').decorateWithReadConcern;
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const formattedOrderClause = require('../utils').formattedOrderClause;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const toError = require('../utils').toError;
|
||||
const CursorState = require('../core/cursor').CursorState;
|
||||
|
||||
/**
|
||||
* Build the count command.
|
||||
*
|
||||
* @method
|
||||
* @param {collectionOrCursor} an instance of a collection or cursor
|
||||
* @param {object} query The query for the count.
|
||||
* @param {object} [options] Optional settings. See Collection.prototype.count and Cursor.prototype.count for a list of options.
|
||||
*/
|
||||
function buildCountCommand(collectionOrCursor, query, options) {
  const skip = options.skip;
  const limit = options.limit;
  let hint = options.hint;
  const maxTimeMS = options.maxTimeMS;
  query = query || {};

  // Final query
  const cmd = {
    count: options.collectionName,
    query: query
  };

  // Duck-type check: cursors carry `s.numberOfRetries`, collections do not.
  // NOTE(review): relies on that field being set on every cursor — verify.
  if (collectionOrCursor.s.numberOfRetries) {
    // collectionOrCursor is a cursor; cursor-level hint takes precedence
    // over the hint baked into the cursor's command.
    if (collectionOrCursor.options.hint) {
      hint = collectionOrCursor.options.hint;
    } else if (collectionOrCursor.cmd.hint) {
      hint = collectionOrCursor.cmd.hint;
    }
    decorateWithCollation(cmd, collectionOrCursor, collectionOrCursor.cmd);
  } else {
    decorateWithCollation(cmd, collectionOrCursor, options);
  }

  // Add limit, skip and maxTimeMS if defined
  if (typeof skip === 'number') cmd.skip = skip;
  if (typeof limit === 'number') cmd.limit = limit;
  if (typeof maxTimeMS === 'number') cmd.maxTimeMS = maxTimeMS;
  if (hint) cmd.hint = hint;

  // Do we have a readConcern specified
  decorateWithReadConcern(cmd, collectionOrCursor);

  return cmd;
}
|
||||
|
||||
/**
 * Shared completion handler for delete operations: copies the server's `n`
 * count onto the result as `deletedCount` before handing it to the caller.
 * A missing result is treated as success ({ ok: 1 }).
 */
function deleteCallback(err, r, callback) {
  if (callback == null) {
    return;
  }

  if (err) {
    return callback(err);
  }

  if (r == null) {
    return callback(null, { result: { ok: 1 } });
  }

  r.deletedCount = r.result.n;
  callback(null, r);
}
|
||||
|
||||
/**
|
||||
* Find and update a document.
|
||||
*
|
||||
* @method
|
||||
* @param {Collection} a Collection instance.
|
||||
* @param {object} query Query object to locate the object to modify.
|
||||
* @param {array} sort If multiple docs match, choose the first one in the specified sort order as the object to manipulate.
|
||||
* @param {object} doc The fields/vals to be updated.
|
||||
* @param {object} [options] Optional settings. See Collection.prototype.findAndModify for a list of options.
|
||||
* @param {Collection~findAndModifyCallback} [callback] The command result callback
|
||||
* @deprecated use findOneAndUpdate, findOneAndReplace or findOneAndDelete instead
|
||||
*/
|
||||
function findAndModify(coll, query, sort, doc, options, callback) {
  // Create findAndModify command object
  const queryObject = {
    findAndModify: coll.collectionName,
    query: query
  };

  sort = formattedOrderClause(sort);
  if (sort) {
    queryObject.sort = sort;
  }

  // Normalize tri-state flags to strict booleans for the server command.
  queryObject.new = options.new ? true : false;
  queryObject.remove = options.remove ? true : false;
  queryObject.upsert = options.upsert ? true : false;

  // `projection` is the modern spelling; `fields` is the legacy alias.
  const projection = options.projection || options.fields;

  if (projection) {
    queryObject.fields = projection;
  }

  // arrayFilters belongs on the command, not in the wire-level options;
  // note this mutates the caller-supplied options object.
  if (options.arrayFilters) {
    queryObject.arrayFilters = options.arrayFilters;
    delete options.arrayFilters;
  }

  // `remove` and `update` are mutually exclusive on the server.
  if (doc && !options.remove) {
    queryObject.update = doc;
  }

  if (options.maxTimeMS) queryObject.maxTimeMS = options.maxTimeMS;

  // Either use override on the function, or go back to default on either the collection
  // level or db
  options.serializeFunctions = options.serializeFunctions || coll.s.serializeFunctions;

  // No check on the documents
  options.checkKeys = false;

  // Final options for retryable writes and write concern
  let finalOptions = Object.assign({}, options);
  finalOptions = applyRetryableWrites(finalOptions, coll.s.db);
  finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options);

  // Decorate the findAndModify command with the write Concern
  if (finalOptions.writeConcern) {
    queryObject.writeConcern = finalOptions.writeConcern;
  }

  // Have we specified bypassDocumentValidation
  if (finalOptions.bypassDocumentValidation === true) {
    queryObject.bypassDocumentValidation = finalOptions.bypassDocumentValidation;
  }

  // findAndModify is a write; it must run on the primary.
  finalOptions.readPreference = ReadPreference.primary;

  // Have we specified collation
  try {
    decorateWithCollation(queryObject, coll, finalOptions);
  } catch (err) {
    return callback(err, null);
  }

  // Execute the command
  executeCommand(coll.s.db, queryObject, finalOptions, (err, result) => {
    if (err) return handleCallback(callback, err, null);

    return handleCallback(callback, null, result);
  });
}
|
||||
|
||||
/**
|
||||
* Retrieves this collections index info.
|
||||
*
|
||||
* @method
|
||||
* @param {Db} db The Db instance on which to retrieve the index info.
|
||||
* @param {string} name The name of the collection.
|
||||
* @param {object} [options] Optional settings. See Db.prototype.indexInformation for a list of options.
|
||||
* @param {Db~resultCallback} [callback] The command result callback
|
||||
*/
|
||||
function indexInformation(db, name, options, callback) {
  // If we specified full information
  const full = options['full'] == null ? false : options['full'];

  // Did the user destroy the topology
  if (db.serverConfig && db.serverConfig.isDestroyed())
    return callback(new MongoError('topology was destroyed'));
  // Process all the results from the index command and collection.
  // Converts raw index documents into { indexName: [[field, direction], ...] }.
  function processResults(indexes) {
    // Contains all the information
    let info = {};
    // Process all the indexes
    for (let i = 0; i < indexes.length; i++) {
      const index = indexes[i];
      // Let's unpack the object
      info[index.name] = [];
      for (let name in index.key) {
        info[index.name].push([name, index.key[name]]);
      }
    }

    return info;
  }

  // Get the list of indexes of the specified collection
  db.collection(name)
    .listIndexes(options)
    .toArray((err, indexes) => {
      if (err) return callback(toError(err));
      if (!Array.isArray(indexes)) return handleCallback(callback, null, []);
      if (full) return handleCallback(callback, null, indexes);
      handleCallback(callback, null, processResults(indexes));
    });
}
|
||||
|
||||
/**
 * Assign client-side `_id` values to documents before insertion, unless the
 * server is configured to generate ObjectIds (`forceServerObjectId`). An
 * explicit boolean in `options` overrides the db-level setting. When the
 * server generates ids, the input array is returned untouched.
 */
function prepareDocs(coll, docs, options) {
  let forceServerObjectId;
  if (typeof options.forceServerObjectId === 'boolean') {
    forceServerObjectId = options.forceServerObjectId;
  } else {
    forceServerObjectId = coll.s.db.options.forceServerObjectId;
  }

  // no need to modify the docs if server sets the ObjectId
  if (forceServerObjectId === true) {
    return docs;
  }

  const prepared = [];
  for (const doc of docs) {
    if (forceServerObjectId !== true && doc._id == null) {
      doc._id = coll.s.pkFactory.createPk();
    }
    prepared.push(doc);
  }
  return prepared;
}
|
||||
|
||||
// Get the next available document from the cursor, returns null if no more documents are available.
|
||||
function nextObject(cursor, callback) {
  // Refuse to iterate a closed or dead cursor.
  if (cursor.s.state === CursorState.CLOSED || (cursor.isDead && cursor.isDead())) {
    return handleCallback(
      callback,
      MongoError.create({ message: 'Cursor is closed', driver: true })
    );
  }

  // On the first fetch, normalize the sort specification on the cursor's
  // command (it may be in array/string shorthand form).
  if (cursor.s.state === CursorState.INIT && cursor.cmd && cursor.cmd.sort) {
    try {
      cursor.cmd.sort = formattedOrderClause(cursor.cmd.sort);
    } catch (err) {
      return handleCallback(callback, err);
    }
  }

  // Get the next object; the cursor transitions to OPEN after the first
  // _next call regardless of outcome.
  cursor._next((err, doc) => {
    cursor.s.state = CursorState.OPEN;
    if (err) return handleCallback(callback, err);
    handleCallback(callback, null, doc);
  });
}
|
||||
|
||||
function insertDocuments(coll, docs, options, callback) {
  if (typeof options === 'function') (callback = options), (options = {});
  options = options || {};
  // Ensure we are operating on an array op docs
  docs = Array.isArray(docs) ? docs : [docs];

  // Final options for retryable writes and write concern
  let finalOptions = Object.assign({}, options);
  finalOptions = applyRetryableWrites(finalOptions, coll.s.db);
  finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options);

  // Legacy `keepGoing` flag maps to an unordered bulk insert.
  if (finalOptions.keepGoing === true) finalOptions.ordered = false;
  finalOptions.serializeFunctions = options.serializeFunctions || coll.s.serializeFunctions;

  // Assign client-side _ids unless the server is configured to do it.
  docs = prepareDocs(coll, docs, options);

  // File inserts
  coll.s.topology.insert(coll.s.namespace, docs, finalOptions, (err, result) => {
    if (callback == null) return;
    if (err) return handleCallback(callback, err);
    if (result == null) return handleCallback(callback, null, null);
    // Surface server-side errors (command error code or first write error)
    // as driver errors.
    if (result.result.code) return handleCallback(callback, toError(result.result));
    if (result.result.writeErrors)
      return handleCallback(callback, toError(result.result.writeErrors[0]));
    // Add docs to the list
    result.ops = docs;
    // Return the results
    handleCallback(callback, null, result);
  });
}
|
||||
|
||||
function removeDocuments(coll, selector, options, callback) {
  // Flexible signature: (coll, selector, cb) or (coll, cb).
  if (typeof options === 'function') {
    (callback = options), (options = {});
  } else if (typeof selector === 'function') {
    callback = selector;
    options = {};
    selector = {};
  }

  // Create an empty options object if the provided one is null
  options = options || {};

  // Final options for retryable writes and write concern
  let finalOptions = Object.assign({}, options);
  finalOptions = applyRetryableWrites(finalOptions, coll.s.db);
  finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options);

  // If selector is null set empty
  if (selector == null) selector = {};

  // Build the op
  const op = { q: selector, limit: 0 };
  if (options.single) {
    op.limit = 1;
  } else if (finalOptions.retryWrites) {
    // Multi-document deletes are not retryable.
    finalOptions.retryWrites = false;
  }
  if (options.hint) {
    op.hint = options.hint;
  }

  // Have we specified collation
  try {
    decorateWithCollation(finalOptions, coll, options);
  } catch (err) {
    return callback(err, null);
  }

  // Execute the remove
  coll.s.topology.remove(coll.s.namespace, [op], finalOptions, (err, result) => {
    if (callback == null) return;
    if (err) return handleCallback(callback, err, null);
    if (result == null) return handleCallback(callback, null, null);
    // Surface server-side errors as driver errors.
    if (result.result.code) return handleCallback(callback, toError(result.result));
    if (result.result.writeErrors) {
      return handleCallback(callback, toError(result.result.writeErrors[0]));
    }

    // Return the results
    handleCallback(callback, null, result);
  });
}
|
||||
|
||||
function updateDocuments(coll, selector, document, options, callback) {
  if ('function' === typeof options) (callback = options), (options = null);
  if (options == null) options = {};
  if (!('function' === typeof callback)) callback = null;

  // If we are not providing a selector or document throw
  if (selector == null || typeof selector !== 'object')
    return callback(toError('selector must be a valid JavaScript object'));
  if (document == null || typeof document !== 'object')
    return callback(toError('document must be a valid JavaScript object'));

  // Final options for retryable writes and write concern
  let finalOptions = Object.assign({}, options);
  finalOptions = applyRetryableWrites(finalOptions, coll.s.db);
  finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options);

  // Do we return the actual result document
  // Either use override on the function, or go back to default on either the collection
  // level or db
  finalOptions.serializeFunctions = options.serializeFunctions || coll.s.serializeFunctions;

  // Execute the operation; normalize upsert/multi to strict booleans.
  const op = { q: selector, u: document };
  op.upsert = options.upsert !== void 0 ? !!options.upsert : false;
  op.multi = options.multi !== void 0 ? !!options.multi : false;

  if (options.hint) {
    op.hint = options.hint;
  }

  // arrayFilters belongs on the op document, not the wire-level options.
  if (finalOptions.arrayFilters) {
    op.arrayFilters = finalOptions.arrayFilters;
    delete finalOptions.arrayFilters;
  }

  // Multi-document updates are not retryable.
  if (finalOptions.retryWrites && op.multi) {
    finalOptions.retryWrites = false;
  }

  // Have we specified collation
  try {
    decorateWithCollation(finalOptions, coll, options);
  } catch (err) {
    return callback(err, null);
  }

  // Update options
  coll.s.topology.update(coll.s.namespace, [op], finalOptions, (err, result) => {
    if (callback == null) return;
    if (err) return handleCallback(callback, err, null);
    if (result == null) return handleCallback(callback, null, null);
    // Surface server-side errors as driver errors.
    if (result.result.code) return handleCallback(callback, toError(result.result));
    if (result.result.writeErrors)
      return handleCallback(callback, toError(result.result.writeErrors[0]));
    // Return the results
    handleCallback(callback, null, result);
  });
}
|
||||
|
||||
/**
 * Shared completion handler for update operations: derives the CRUD-spec
 * counters (modifiedCount, upsertedId, upsertedCount, matchedCount) from the
 * raw server result before handing it to the caller. A missing result is
 * treated as success ({ ok: 1 }).
 */
function updateCallback(err, r, callback) {
  if (callback == null) {
    return;
  }
  if (err) {
    return callback(err);
  }
  if (r == null) {
    return callback(null, { result: { ok: 1 } });
  }

  const upserted = r.result.upserted;
  const didUpsert = Array.isArray(upserted) && upserted.length > 0;

  // Pre-2.6 servers don't report nModified; fall back to n.
  r.modifiedCount = r.result.nModified != null ? r.result.nModified : r.result.n;
  // FIXME(major): should be `upserted[0]._id`
  r.upsertedId = didUpsert ? upserted[0] : null;
  r.upsertedCount = didUpsert ? upserted.length : 0;
  r.matchedCount = didUpsert ? 0 : r.result.n;
  callback(null, r);
}
|
||||
|
||||
// Shared CRUD helpers used by both the legacy db_ops/collection_ops code
// paths and the operation classes.
module.exports = {
  buildCountCommand,
  deleteCallback,
  findAndModify,
  indexInformation,
  nextObject,
  prepareDocs,
  insertDocuments,
  removeDocuments,
  updateDocuments,
  updateCallback
};
|
||||
806
node_modules/mongodb/lib/operations/connect.js
generated
vendored
Normal file
806
node_modules/mongodb/lib/operations/connect.js
generated
vendored
Normal file
@@ -0,0 +1,806 @@
|
||||
'use strict';
|
||||
|
||||
const deprecate = require('util').deprecate;
|
||||
const Logger = require('../core').Logger;
|
||||
const MongoCredentials = require('../core').MongoCredentials;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const Mongos = require('../topologies/mongos');
|
||||
const NativeTopology = require('../topologies/native_topology');
|
||||
const parse = require('../core').parseConnectionString;
|
||||
const ReadConcern = require('../read_concern');
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const ReplSet = require('../topologies/replset');
|
||||
const Server = require('../topologies/server');
|
||||
const ServerSessionPool = require('../core').Sessions.ServerSessionPool;
|
||||
const emitDeprecationWarning = require('../utils').emitDeprecationWarning;
|
||||
const fs = require('fs');
|
||||
const BSON = require('../core/connection/utils').retrieveBSON();
|
||||
const CMAP_EVENT_NAMES = require('../cmap/events').CMAP_EVENT_NAMES;
|
||||
|
||||
// Lazily-required MongoClient constructor. Required on first use (rather
// than at module load) to avoid a circular dependency with ../mongo_client.
let client;
function loadClient() {
  if (!client) {
    client = require('../mongo_client');
  }
  return client;
}
|
||||
|
||||
// Deprecated fallback connection-string parser, used when the caller does
// not opt into the new parser via `useNewUrlParser`.
const legacyParse = deprecate(
  require('../url_parser'),
  'current URL string parser is deprecated, and will be removed in a future version. ' +
    'To use the new parser, pass option { useNewUrlParser: true } to MongoClient.connect.'
);
|
||||
|
||||
// Maps the public authMechanism names (as they appear in connection strings)
// to the identifiers used internally by the core auth providers.
const AUTH_MECHANISM_INTERNAL_MAP = {
  DEFAULT: 'default',
  PLAIN: 'plain',
  GSSAPI: 'gssapi',
  'MONGODB-CR': 'mongocr',
  'MONGODB-X509': 'x509',
  'MONGODB-AWS': 'mongodb-aws',
  'SCRAM-SHA-1': 'scram-sha-1',
  'SCRAM-SHA-256': 'scram-sha-256'
};
|
||||
|
||||
// All SDAM / monitoring event names that may be emitted by a topology;
// used to forward, collect and clear listeners wholesale.
const monitoringEvents = [
  'timeout',
  'close',
  'serverOpening',
  'serverDescriptionChanged',
  'serverHeartbeatStarted',
  'serverHeartbeatSucceeded',
  'serverHeartbeatFailed',
  'serverClosed',
  'topologyOpening',
  'topologyClosed',
  'topologyDescriptionChanged',
  'commandStarted',
  'commandSucceeded',
  'commandFailed',
  'joined',
  'left',
  'ping',
  'ha',
  'all',
  'fullsetup',
  'open'
];
|
||||
|
||||
// The set of authentication mechanism names accepted from user options.
const VALID_AUTH_MECHANISMS = new Set([
  'DEFAULT',
  'PLAIN',
  'GSSAPI',
  'MONGODB-CR',
  'MONGODB-X509',
  'MONGODB-AWS',
  'SCRAM-SHA-1',
  'SCRAM-SHA-256'
]);
|
||||
|
||||
// Every MongoClient option name the driver recognizes; anything outside this
// list (plus legacyOptionNames) triggers a warning or, with
// `validateOptions`, a hard error in validOptions().
const validOptionNames = [
  'poolSize',
  'ssl',
  'sslValidate',
  'sslCA',
  'sslCert',
  'sslKey',
  'sslPass',
  'sslCRL',
  'autoReconnect',
  'noDelay',
  'keepAlive',
  'keepAliveInitialDelay',
  'connectTimeoutMS',
  'family',
  'socketTimeoutMS',
  'reconnectTries',
  'reconnectInterval',
  'ha',
  'haInterval',
  'replicaSet',
  'secondaryAcceptableLatencyMS',
  'acceptableLatencyMS',
  'connectWithNoPrimary',
  'authSource',
  'w',
  'wtimeout',
  'j',
  'forceServerObjectId',
  'serializeFunctions',
  'ignoreUndefined',
  'raw',
  'bufferMaxEntries',
  'readPreference',
  'pkFactory',
  'promiseLibrary',
  'readConcern',
  'maxStalenessSeconds',
  'loggerLevel',
  'logger',
  'promoteValues',
  'promoteBuffers',
  'promoteLongs',
  'domainsEnabled',
  'checkServerIdentity',
  'validateOptions',
  'appname',
  'auth',
  'user',
  'password',
  'authMechanism',
  'compression',
  'fsync',
  'readPreferenceTags',
  'numberOfRetries',
  'auto_reconnect',
  'minSize',
  'monitorCommands',
  'retryWrites',
  'retryReads',
  'useNewUrlParser',
  'useUnifiedTopology',
  'serverSelectionTimeoutMS',
  'useRecoveryToken',
  'autoEncryption',
  'driverInfo',
  'tls',
  'tlsInsecure',
  'tlsinsecure',
  'tlsAllowInvalidCertificates',
  'tlsAllowInvalidHostnames',
  'tlsCAFile',
  'tlsCertificateFile',
  'tlsCertificateKeyFile',
  'tlsCertificateKeyFilePassword',
  'minHeartbeatFrequencyMS',
  'heartbeatFrequencyMS',
  'directConnection',
  'appName',

  // CMAP options
  'maxPoolSize',
  'minPoolSize',
  'maxIdleTimeMS',
  'waitQueueTimeoutMS'
];

// Options silently ignored during validation (historical no-ops).
const ignoreOptionNames = ['native_parser'];
// Old nested option containers; accepted but warned about in validOptions().
const legacyOptionNames = ['server', 'replset', 'replSet', 'mongos', 'db'];
|
||||
|
||||
// Validate options object
|
||||
/**
 * Validate a MongoClient options object against the known option names.
 * Unknown options produce a console warning, or — when `validateOptions` is
 * set — a returned MongoError (note: returned, not thrown). Legacy nested
 * option containers additionally emit a deprecation warning.
 * @param {object} options The options object to validate.
 * @returns {MongoError|undefined} An error for the first unknown option when
 *   `validateOptions` is enabled; otherwise undefined.
 */
function validOptions(options) {
  const _validOptions = validOptionNames.concat(legacyOptionNames);

  for (const name in options) {
    if (ignoreOptionNames.indexOf(name) !== -1) {
      continue;
    }

    if (_validOptions.indexOf(name) === -1) {
      if (options.validateOptions) {
        return new MongoError(`option ${name} is not supported`);
      } else {
        console.warn(`the options [${name}] is not supported`);
      }
    }

    if (legacyOptionNames.indexOf(name) !== -1) {
      console.warn(
        `the server/replset/mongos/db options are deprecated, ` +
          `all their options are supported at the top level of the options object [${validOptionNames}]`
      );
    }
  }
}
|
||||
|
||||
// Lookup from lowercased option name back to its canonical camelCase form,
// used to normalize options parsed case-insensitively from connection strings.
const LEGACY_OPTIONS_MAP = validOptionNames.reduce((obj, name) => {
  obj[name.toLowerCase()] = name;
  return obj;
}, {});
|
||||
|
||||
/**
 * Forward topology lifecycle events to the MongoClient. Recurring events are
 * registered with `on`; the one-shot lifecycle events (open/fullsetup/all)
 * with `once`.
 */
function addListeners(mongoClient, topology) {
  for (const eventName of ['authenticated', 'error', 'timeout', 'close', 'parseError']) {
    topology.on(eventName, createListener(mongoClient, eventName));
  }

  for (const eventName of ['open', 'fullsetup', 'all']) {
    topology.once(eventName, createListener(mongoClient, eventName));
  }

  topology.on('reconnect', createListener(mongoClient, 'reconnect'));
}
|
||||
|
||||
// Attach the connected topology to the client. Legacy (non-unified)
// topologies don't manage their own session pool, so one is created for
// them here.
function assignTopology(client, topology) {
  client.topology = topology;

  if (!(topology instanceof NativeTopology)) {
    topology.s.sessionPool = new ServerSessionPool(topology.s.coreTopology);
  }
}
|
||||
|
||||
// Clear out all events
|
||||
/**
 * Remove every listener for all known monitoring events from a topology.
 */
function clearAllEvents(topology) {
  for (const eventName of monitoringEvents) {
    topology.removeAllListeners(eventName);
  }
}
|
||||
|
||||
// Collect all events in order from SDAM
|
||||
// Collect all events in order from SDAM so they can be replayed to the
// client after connect completes. Only real MongoClient instances are
// instrumented; for the 'open' event the client itself is recorded as the
// payload.
function collectEvents(mongoClient, topology) {
  let MongoClient = loadClient();
  const collectedEvents = [];

  if (mongoClient instanceof MongoClient) {
    monitoringEvents.forEach(event => {
      topology.on(event, (object1, object2) => {
        if (event === 'open') {
          collectedEvents.push({ event: event, object1: mongoClient });
        } else {
          collectedEvents.push({ event: event, object1: object1, object2: object2 });
        }
      });
    });
  }

  return collectedEvents;
}
|
||||
|
||||
/**
 * When TLS is requested, eagerly read the certificate/key file paths in
 * `options` (sslCA, sslKey, sslCert), replacing each path with the file's
 * contents. No-op when `options.tls` is null/undefined. Mutates `options`.
 */
function resolveTLSOptions(options) {
  if (options.tls == null) {
    return;
  }

  for (const optionName of ['sslCA', 'sslKey', 'sslCert']) {
    const filePath = options[optionName];
    if (filePath) {
      options[optionName] = fs.readFileSync(filePath);
    }
  }
}
|
||||
|
||||
// util.deprecate wrapper: emits the SDAM deprecation warning at most once
// per process when connecting without `useUnifiedTopology`.
const emitDeprecationForNonUnifiedTopology = deprecate(() => {},
'current Server Discovery and Monitoring engine is deprecated, and will be removed in a future version. ' + 'To use the new Server Discover and Monitoring engine, pass option { useUnifiedTopology: true } to the MongoClient constructor.');
|
||||
|
||||
/**
 * Connects a MongoClient to a deployment described either by a connection
 * string or by a pre-constructed legacy topology (Server/ReplSet/Mongos).
 *
 * @param {MongoClient} mongoClient The client being connected
 * @param {string|Server|ReplSet|Mongos} url Connection string or legacy topology instance
 * @param {object} options Connection options (copied; the caller's object is not mutated)
 * @param {Function} callback Called with (err) or (null, topology)
 * @throws {Error} If no callback is provided
 */
function connect(mongoClient, url, options, callback) {
  options = Object.assign({}, options);

  // If callback is null throw an exception
  if (callback == null) {
    throw new Error('no callback function provided');
  }

  // Tracks whether credentials were generated, so an 'authenticated' event
  // can be emitted from connectCallback.
  let didRequestAuthentication = false;
  const logger = Logger('MongoClient', options);

  // Did we pass in a Server/ReplSet/Mongos
  if (url instanceof Server || url instanceof ReplSet || url instanceof Mongos) {
    return connectWithUrl(mongoClient, url, options, connectCallback);
  }

  // useNewUrlParser defaults to true; only an explicit `false` opts out.
  const useNewUrlParser = options.useNewUrlParser !== false;

  const parseFn = useNewUrlParser ? parse : legacyParse;
  const transform = useNewUrlParser ? transformUrlOptions : legacyTransformUrlOptions;

  parseFn(url, options, (err, _object) => {
    // Do not attempt to connect if parsing error
    if (err) return callback(err);

    // Flatten
    const object = transform(_object);

    // Parse the string
    const _finalOptions = createUnifiedOptions(object, options);

    // Check if we have connection and socket timeout set
    if (_finalOptions.socketTimeoutMS == null) _finalOptions.socketTimeoutMS = 360000;
    if (_finalOptions.connectTimeoutMS == null) _finalOptions.connectTimeoutMS = 10000;
    if (_finalOptions.retryWrites == null) _finalOptions.retryWrites = true;
    if (_finalOptions.useRecoveryToken == null) _finalOptions.useRecoveryToken = true;
    if (_finalOptions.readPreference == null) _finalOptions.readPreference = 'primary';

    // Auth is handled via credentials below, not via db_options.
    if (_finalOptions.db_options && _finalOptions.db_options.auth) {
      delete _finalOptions.db_options.auth;
    }

    // `journal` should be translated to `j` for the driver
    if (_finalOptions.journal != null) {
      _finalOptions.j = _finalOptions.journal;
      _finalOptions.journal = undefined;
    }

    // resolve tls options if needed
    resolveTLSOptions(_finalOptions);

    // Store the merged options object
    mongoClient.s.options = _finalOptions;

    // Failure modes
    if (object.servers.length === 0) {
      return callback(new Error('connection string must contain at least one seed host'));
    }

    if (_finalOptions.auth && !_finalOptions.credentials) {
      try {
        didRequestAuthentication = true;
        _finalOptions.credentials = generateCredentials(
          mongoClient,
          _finalOptions.auth.user,
          _finalOptions.auth.password,
          _finalOptions
        );
      } catch (err) {
        return callback(err);
      }
    }

    if (_finalOptions.useUnifiedTopology) {
      return createTopology(mongoClient, 'unified', _finalOptions, connectCallback);
    }

    emitDeprecationForNonUnifiedTopology();

    // Do we have a replicaset then skip discovery and go straight to connectivity
    if (_finalOptions.replicaSet || _finalOptions.rs_name) {
      return createTopology(mongoClient, 'replicaset', _finalOptions, connectCallback);
    } else if (object.servers.length > 1) {
      return createTopology(mongoClient, 'mongos', _finalOptions, connectCallback);
    } else {
      return createServer(mongoClient, _finalOptions, connectCallback);
    }
  });

  // Shared completion handler for all topology types.
  function connectCallback(err, topology) {
    const warningMessage = `seed list contains no mongos proxies, replicaset connections requires the parameter replicaSet to be supplied in the URI or options object, mongodb://server:port/db?replicaSet=name`;
    if (err && err.message === 'no mongos proxies found in seed list') {
      if (logger.isWarn()) {
        logger.warn(warningMessage);
      }

      // Return a more specific error message for MongoClient.connect
      return callback(new MongoError(warningMessage));
    }

    if (didRequestAuthentication) {
      mongoClient.emit('authenticated', null, true);
    }

    // Return the error and db instance
    callback(err, topology);
  }
}
|
||||
|
||||
/**
 * Connects using an already-constructed legacy topology instance
 * (Server/ReplSet/Mongos) passed in place of a connection string.
 *
 * @param {MongoClient} mongoClient The client being connected
 * @param {Server|ReplSet|Mongos} url The pre-built topology instance
 * @param {object} options Connection options
 * @param {Function} connectCallback Called with (err, topology)
 */
function connectWithUrl(mongoClient, url, options, connectCallback) {
  // Set the topology
  assignTopology(mongoClient, url);

  // Add listeners
  addListeners(mongoClient, url);

  // Propagate the events to the client
  relayEvents(mongoClient, url);

  let finalOptions = Object.assign({}, options);

  // If we have a readPreference passed in by the db options, convert it from a string
  if (typeof options.readPreference === 'string' || typeof options.read_preference === 'string') {
    finalOptions.readPreference = new ReadPreference(
      options.readPreference || options.read_preference
    );
  }

  // Generate credentials up front when any auth-related option is present,
  // so the topology connect below can authenticate.
  const isDoingAuth = finalOptions.user || finalOptions.password || finalOptions.authMechanism;
  if (isDoingAuth && !finalOptions.credentials) {
    try {
      finalOptions.credentials = generateCredentials(
        mongoClient,
        finalOptions.user,
        finalOptions.password,
        finalOptions
      );
    } catch (err) {
      return connectCallback(err, url);
    }
  }

  return url.connect(finalOptions, connectCallback);
}
|
||||
|
||||
/**
 * Builds an event-forwarding listener for the client. For the lifecycle
 * events in the set below, the client itself is emitted as the payload;
 * every other event forwards its original two arguments unchanged.
 *
 * @param {MongoClient} mongoClient The client to emit on
 * @param {string} event The event name this listener forwards
 * @returns {Function} A (v1, v2) listener suitable for topology.on(event, ...)
 */
function createListener(mongoClient, event) {
  const clientPayloadEvents = new Set(['all', 'fullsetup', 'open', 'reconnect']);
  const forwardsClient = clientPayloadEvents.has(event);

  return (first, second) => {
    if (forwardsClient) {
      return mongoClient.emit(event, mongoClient);
    }

    mongoClient.emit(event, first, second);
  };
}
|
||||
|
||||
/**
 * Creates and connects a single-server topology. If the server turns out to
 * be a mongos (isdbgrid), the connection is torn down and replaced with a
 * mongos topology instead.
 *
 * @param {MongoClient} mongoClient The client being connected
 * @param {object} options Connection options (promiseLibrary is added)
 * @param {Function} callback Called with (err) or (err, topology)
 */
function createServer(mongoClient, options, callback) {
  // Pass in the promise library
  options.promiseLibrary = mongoClient.s.promiseLibrary;

  // Set default options
  const servers = translateOptions(options);

  const server = servers[0];

  // Buffer events emitted during connect; they are replayed below unless we
  // switch over to a mongos topology.
  const collectedEvents = collectEvents(mongoClient, server);

  // Connect to topology
  server.connect(options, (err, topology) => {
    if (err) {
      server.close(true);
      return callback(err);
    }
    // Clear out all the collected event listeners
    clearAllEvents(server);

    // Relay all the events
    relayEvents(mongoClient, server);
    // Add listeners
    addListeners(mongoClient, server);
    // Check if we are really speaking to a mongos
    const ismaster = topology.lastIsMaster();

    // Set the topology
    assignTopology(mongoClient, topology);

    // Do we actually have a mongos
    if (ismaster && ismaster.msg === 'isdbgrid') {
      // Destroy the current connection
      topology.close();
      // Create mongos connection instead
      return createTopology(mongoClient, 'mongos', options, callback);
    }

    // Fire all the events
    replayEvents(mongoClient, collectedEvents);
    // Otherwise callback
    callback(err, topology);
  });
}
|
||||
|
||||
// Events emitted by the legacy topologies that the unified topology no longer
// supports; adding a listener for one of these triggers a deprecation warning
// (see registerDeprecatedEventNotifiers).
const DEPRECATED_UNIFIED_EVENTS = new Set([
  'reconnect',
  'reconnectFailed',
  'attemptReconnect',
  'joined',
  'left',
  'ping',
  'ha',
  'all',
  'fullsetup',
  'open'
]);
|
||||
|
||||
/**
 * Watches the client for new listener registrations and emits a deprecation
 * warning whenever a listener is added for an event the unified topology no
 * longer supports (see DEPRECATED_UNIFIED_EVENTS).
 *
 * @param {MongoClient} client The client to watch
 */
function registerDeprecatedEventNotifiers(client) {
  client.on('newListener', eventName => {
    if (!DEPRECATED_UNIFIED_EVENTS.has(eventName)) {
      return;
    }

    emitDeprecationWarning(
      `The \`${eventName}\` event is no longer supported by the unified topology, please read more by visiting http://bit.ly/2D8WfT6`,
      'DeprecationWarning'
    );
  });
}
|
||||
|
||||
/**
 * Creates a topology of the requested type ('mongos', 'replicaset' or
 * 'unified'), wires up event relaying, optionally initializes client-side
 * field level encryption (CSFLE), and connects.
 *
 * @param {MongoClient} mongoClient The client the topology belongs to
 * @param {string} topologyType One of 'mongos', 'replicaset', 'unified'
 * @param {object} options Connection options (mutated: promiseLibrary and
 *   autoEncrypter are attached)
 * @param {Function} callback Called with (err) or (undefined, topology)
 */
function createTopology(mongoClient, topologyType, options, callback) {
  // Pass in the promise library
  options.promiseLibrary = mongoClient.s.promiseLibrary;

  const translationOptions = {};
  if (topologyType === 'unified') translationOptions.createServers = false;

  // Set default options
  const servers = translateOptions(options, translationOptions);

  // determine CSFLE support
  if (options.autoEncryption != null) {
    let AutoEncrypter;
    try {
      require.resolve('mongodb-client-encryption');
    } catch (err) {
      callback(
        new MongoError(
          'Auto-encryption requested, but the module is not installed. Please add `mongodb-client-encryption` as a dependency of your project'
        )
      );
      return;
    }

    try {
      let mongodbClientEncryption = require('mongodb-client-encryption');
      if (typeof mongodbClientEncryption.extension !== 'function') {
        callback(
          new MongoError(
            'loaded version of `mongodb-client-encryption` does not have property `extension`. Please make sure you are loading the correct version of `mongodb-client-encryption`'
          )
        );

        // BUGFIX: bail out here. Previously execution fell through to the
        // `.extension(...)` call below, which would throw (extension is not
        // a function), land in the catch block, and invoke the callback a
        // second time.
        return;
      }
      AutoEncrypter = mongodbClientEncryption.extension(require('../../index')).AutoEncrypter;
    } catch (err) {
      callback(err);
      return;
    }

    // Provide a BSON instance for mongocrypt unless the caller supplied one.
    const mongoCryptOptions = Object.assign(
      {
        bson:
          options.bson ||
          new BSON([
            BSON.Binary,
            BSON.Code,
            BSON.DBRef,
            BSON.Decimal128,
            BSON.Double,
            BSON.Int32,
            BSON.Long,
            BSON.Map,
            BSON.MaxKey,
            BSON.MinKey,
            BSON.ObjectId,
            BSON.BSONRegExp,
            BSON.Symbol,
            BSON.Timestamp
          ])
      },
      options.autoEncryption
    );

    options.autoEncrypter = new AutoEncrypter(mongoClient, mongoCryptOptions);
  }

  // Create the topology
  let topology;
  if (topologyType === 'mongos') {
    topology = new Mongos(servers, options);
  } else if (topologyType === 'replicaset') {
    topology = new ReplSet(servers, options);
  } else if (topologyType === 'unified') {
    topology = new NativeTopology(options.servers, options);
    registerDeprecatedEventNotifiers(mongoClient);
  }

  // Add listeners
  addListeners(mongoClient, topology);

  // Propagate the events to the client
  relayEvents(mongoClient, topology);

  // Open the connection
  assignTopology(mongoClient, topology);

  // initialize CSFLE if requested, then connect; the encrypter must be
  // ready before the topology starts handling commands.
  if (options.autoEncrypter) {
    options.autoEncrypter.init(err => {
      if (err) {
        callback(err);
        return;
      }

      topology.connect(options, err => {
        if (err) {
          topology.close(true);
          callback(err);
          return;
        }

        callback(undefined, topology);
      });
    });

    return;
  }

  // otherwise connect normally
  topology.connect(options, err => {
    if (err) {
      topology.close(true);
      return callback(err);
    }

    callback(undefined, topology);
    return;
  });
}
|
||||
|
||||
/**
 * Merges user options into the parsed URL options to produce one flat
 * options object. Known child-option groups are flattened one level;
 * certain option names are copied verbatim without merging; plain nested
 * objects are deep-flattened; everything else is assigned directly.
 *
 * @param {object} finalOptions The options object produced from the URL
 * @param {object} options The user-supplied options to merge in
 * @returns {object} The merged options object
 */
function createUnifiedOptions(finalOptions, options) {
  const childOptions = [
    'mongos',
    'server',
    'db',
    'replset',
    'db_options',
    'server_options',
    'rs_options',
    'mongos_options'
  ];
  const noMerge = ['readconcern', 'compression', 'autoencryption'];

  for (const name in options) {
    const lowered = name.toLowerCase();
    const value = options[name];

    if (noMerge.indexOf(lowered) !== -1) {
      // Copied as-is: these carry structure the merge must not flatten.
      finalOptions[name] = value;
    } else if (childOptions.indexOf(lowered) !== -1) {
      // Known option groups: lift their entries up one level, shallowly.
      finalOptions = mergeOptions(finalOptions, value, false);
    } else if (
      value &&
      typeof value === 'object' &&
      !Buffer.isBuffer(value) &&
      !Array.isArray(value)
    ) {
      // Other plain objects are deep-flattened into the result.
      finalOptions = mergeOptions(finalOptions, value, true);
    } else {
      finalOptions[name] = value;
    }
  }

  return finalOptions;
}
|
||||
|
||||
/**
 * Builds a MongoCredentials object from the user-supplied auth options.
 *
 * @param {MongoClient} client The client requesting authentication (unused here,
 *   kept for call-site symmetry)
 * @param {string} username The username to authenticate with
 * @param {string} password The password to authenticate with
 * @param {object} options Options carrying authSource/authdb/dbName,
 *   authMechanism and authMechanismProperties
 * @returns {MongoCredentials} The assembled credentials
 * @throws {MongoError} If the requested auth mechanism is not supported
 */
function generateCredentials(client, username, password, options) {
  options = Object.assign({}, options);

  // the default db to authenticate against is 'self'
  // if authenticate is called from a retry context, it may be another one, like admin
  const source = options.authSource || options.authdb || options.dbName;

  // authMechanism is matched case-insensitively against the supported set.
  const authMechanismRaw = options.authMechanism || 'DEFAULT';
  const authMechanism = authMechanismRaw.toUpperCase();
  const mechanismProperties = options.authMechanismProperties;

  if (!VALID_AUTH_MECHANISMS.has(authMechanism)) {
    throw MongoError.create({
      // BUGFIX: the message previously contained a stray
      // "', options.authMechanism" fragment inside the template literal,
      // leftover from a printf-style call site.
      message: `authentication mechanism ${authMechanismRaw} not supported`,
      driver: true
    });
  }

  return new MongoCredentials({
    mechanism: AUTH_MECHANISM_INTERNAL_MAP[authMechanism],
    mechanismProperties,
    source,
    username,
    password
  });
}
|
||||
|
||||
/**
 * Flattens the legacy URL parser's output into a single options object:
 * first unifies the parsed structure, then shallow-merges the original
 * top-level keys over it.
 *
 * @param {object} object The legacy parser result
 * @returns {object} The flattened options object
 */
function legacyTransformUrlOptions(object) {
  const unified = createUnifiedOptions({}, object);
  return mergeOptions(unified, object, false);
}
|
||||
|
||||
/**
 * Copies every enumerable key of `source` onto `target`. When `flatten` is
 * true, nested objects are recursed into so their leaf keys land on the top
 * level of `target`; otherwise values (objects included) are assigned as-is.
 *
 * @param {object} target The object written into (also returned)
 * @param {object} source The object whose keys are copied
 * @param {boolean} flatten Whether nested objects are flattened
 * @returns {object} The merged target
 */
function mergeOptions(target, source, flatten) {
  for (const key in source) {
    const value = source[key];
    const isNested = value && typeof value === 'object';

    if (isNested && flatten) {
      target = mergeOptions(target, value, flatten);
    } else {
      target[key] = value;
    }
  }

  return target;
}
|
||||
|
||||
/**
 * Forwards command monitoring (APM), SDAM, legacy and CMAP events emitted by
 * the topology on to the MongoClient, so user listeners registered on the
 * client receive them.
 *
 * @param {MongoClient} mongoClient The client to re-emit on
 * @param {EventEmitter} topology The topology (or server) emitting events
 */
function relayEvents(mongoClient, topology) {
  const serverOrCommandEvents = [
    // APM
    'commandStarted',
    'commandSucceeded',
    'commandFailed',

    // SDAM
    'serverOpening',
    'serverClosed',
    'serverDescriptionChanged',
    'serverHeartbeatStarted',
    'serverHeartbeatSucceeded',
    'serverHeartbeatFailed',
    'topologyOpening',
    'topologyClosed',
    'topologyDescriptionChanged',

    // Legacy
    'joined',
    'left',
    'ping',
    'ha'
  ].concat(CMAP_EVENT_NAMES);

  serverOrCommandEvents.forEach(event => {
    topology.on(event, (object1, object2) => {
      mongoClient.emit(event, object1, object2);
    });
  });
}
|
||||
|
||||
//
// Replay any events due to single server connection switching to Mongos
//
/**
 * Re-emits a batch of previously collected events on the client, preserving
 * order and both payload arguments.
 *
 * @param {MongoClient} mongoClient The client to emit on
 * @param {Array<{event: string, object1: *, object2: *}>} events Collected events
 */
function replayEvents(mongoClient, events) {
  for (const entry of events) {
    mongoClient.emit(entry.event, entry.object1, entry.object2);
  }
}
|
||||
|
||||
/**
 * Converts the new URL parser's result into the flat legacy options shape
 * the rest of connect() expects (servers, auth, dbName, aliases).
 *
 * @param {object} _object Parser output with hosts, options, auth, defaultDatabase, srvHost
 * @returns {object} Flattened options object
 */
function transformUrlOptions(_object) {
  let object = Object.assign({ servers: _object.hosts }, _object.options);
  // Mirror known URI option names onto their camelCase driver equivalents.
  for (let name in object) {
    const camelCaseName = LEGACY_OPTIONS_MAP[name];
    if (camelCaseName) {
      object[camelCaseName] = object[name];
    }
  }

  const hasUsername = _object.auth && _object.auth.username;
  const hasAuthMechanism = _object.options && _object.options.authMechanism;
  if (hasUsername || hasAuthMechanism) {
    object.auth = Object.assign({}, _object.auth);
    // The auth db from the URI path doubles as authSource unless one was given.
    if (object.auth.db) {
      object.authSource = object.authSource || object.auth.db;
    }

    // Legacy code reads auth.user; alias it from the parser's auth.username.
    if (object.auth.username) {
      object.auth.user = object.auth.username;
    }
  }

  if (_object.defaultDatabase) {
    object.dbName = _object.defaultDatabase;
  }

  // Legacy topologies use poolSize; alias from the URI's maxPoolSize.
  if (object.maxPoolSize) {
    object.poolSize = object.maxPoolSize;
  }

  if (object.readConcernLevel) {
    object.readConcern = new ReadConcern(object.readConcernLevel);
  }

  // Legacy write concern option name is `wtimeout`.
  if (object.wTimeoutMS) {
    object.wtimeout = object.wTimeoutMS;
  }

  if (_object.srvHost) {
    object.srvHost = _object.srvHost;
  }

  return object;
}
|
||||
|
||||
/**
 * Normalizes read-preference and timeout options in place and, unless
 * translationOptions.createServers is false (unified topology), builds a
 * core Server instance for each seed host.
 *
 * @param {object} options Connection options (mutated)
 * @param {object} [translationOptions] { createServers?: boolean }
 * @returns {Array|undefined} Server instances, or undefined when
 *   createServers is false
 */
function translateOptions(options, translationOptions) {
  translationOptions = Object.assign({}, { createServers: true }, translationOptions);

  // If we have a readPreference passed in by the db options
  if (typeof options.readPreference === 'string' || typeof options.read_preference === 'string') {
    options.readPreference = new ReadPreference(options.readPreference || options.read_preference);
  }

  // Do we have readPreference tags, add them
  if (options.readPreference && (options.readPreferenceTags || options.read_preference_tags)) {
    options.readPreference.tags = options.readPreferenceTags || options.read_preference_tags;
  }

  // Do we have maxStalenessSeconds
  // NOTE(review): assumes readPreference is set whenever maxStalenessSeconds
  // is given; a bare maxStalenessSeconds would throw here — confirm callers.
  if (options.maxStalenessSeconds) {
    options.readPreference.maxStalenessSeconds = options.maxStalenessSeconds;
  }

  // Set the socket and connection timeouts
  if (options.socketTimeoutMS == null) options.socketTimeoutMS = 360000;
  if (options.connectTimeoutMS == null) options.connectTimeoutMS = 10000;

  // The unified topology constructs its own servers from options.servers.
  if (!translationOptions.createServers) {
    return;
  }

  // Create server instances
  return options.servers.map(serverObj => {
    return serverObj.domain_socket
      ? new Server(serverObj.domain_socket, 27017, options)
      : new Server(serverObj.host, serverObj.port, options);
  });
}
|
||||
|
||||
module.exports = { validOptions, connect };
|
||||
68
node_modules/mongodb/lib/operations/count.js
generated
vendored
Normal file
68
node_modules/mongodb/lib/operations/count.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
'use strict';
|
||||
|
||||
const buildCountCommand = require('./common_functions').buildCountCommand;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
|
||||
/**
 * Implements cursor.count(): builds a `count` command derived from the
 * cursor's query, skip/limit and options, and runs it against the topology.
 */
class CountOperation extends OperationBase {
  /**
   * @param {Cursor} cursor The cursor to count documents for
   * @param {boolean} applySkipLimit When true, the cursor's own skip/limit
   *   values are applied to the count
   * @param {object} options Optional settings (skip, limit, hint, maxTimeMS, readPreference)
   */
  constructor(cursor, applySkipLimit, options) {
    super(options);

    this.cursor = cursor;
    this.applySkipLimit = applySkipLimit;
  }

  /**
   * @param {Function} callback Called with (err) or (null, count)
   */
  execute(callback) {
    const cursor = this.cursor;
    const applySkipLimit = this.applySkipLimit;
    const options = this.options;

    if (applySkipLimit) {
      if (typeof cursor.cursorSkip() === 'number') options.skip = cursor.cursorSkip();
      if (typeof cursor.cursorLimit() === 'number') options.limit = cursor.cursorLimit();
    }

    // Ensure we have the right read preference inheritance
    if (options.readPreference) {
      cursor.setReadPreference(options.readPreference);
    }

    // Inherit maxTimeMS from the cursor's command when not explicitly given.
    if (
      typeof options.maxTimeMS !== 'number' &&
      cursor.cmd &&
      typeof cursor.cmd.maxTimeMS === 'number'
    ) {
      options.maxTimeMS = cursor.cmd.maxTimeMS;
    }

    let finalOptions = {};
    finalOptions.skip = options.skip;
    finalOptions.limit = options.limit;
    finalOptions.hint = options.hint;
    finalOptions.maxTimeMS = options.maxTimeMS;

    // Command
    finalOptions.collectionName = cursor.namespace.collection;

    let command;
    try {
      command = buildCountCommand(cursor, cursor.cmd.query, finalOptions);
    } catch (err) {
      return callback(err);
    }

    // Set cursor server to the same as the topology
    cursor.server = cursor.topology.s.coreTopology;

    // Execute the command
    cursor.topology.command(
      cursor.namespace.withCollection('$cmd'),
      command,
      cursor.options,
      (err, result) => {
        // `n` is the matched-document count reported by the server.
        callback(err, result ? result.result.n : null);
      }
    );
  }
}
|
||||
|
||||
module.exports = CountOperation;
|
||||
41
node_modules/mongodb/lib/operations/count_documents.js
generated
vendored
Normal file
41
node_modules/mongodb/lib/operations/count_documents.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
'use strict';
|
||||
|
||||
const AggregateOperation = require('./aggregate');
|
||||
|
||||
/**
 * Implements collection.countDocuments() as an aggregation pipeline:
 * [$match, ($skip), ($limit), $group { n: { $sum: 1 } }].
 */
class CountDocumentsOperation extends AggregateOperation {
  /**
   * @param {Collection} collection The collection to count documents in
   * @param {object} query The filter applied via $match
   * @param {object} options Optional settings; numeric skip/limit become pipeline stages
   */
  constructor(collection, query, options) {
    const pipeline = [{ $match: query }];
    if (typeof options.skip === 'number') {
      pipeline.push({ $skip: options.skip });
    }

    if (typeof options.limit === 'number') {
      pipeline.push({ $limit: options.limit });
    }

    pipeline.push({ $group: { _id: 1, n: { $sum: 1 } } });

    super(collection, pipeline, options);
  }

  /**
   * @param {Server} server The server to run the aggregation on
   * @param {Function} callback Called with (err, null) or (null, count)
   */
  execute(server, callback) {
    super.execute(server, (err, result) => {
      if (err) {
        callback(err, null);
        return;
      }

      // NOTE: We're avoiding creating a cursor here to reduce the callstack.
      const response = result.result;
      if (response.cursor == null || response.cursor.firstBatch == null) {
        callback(null, 0);
        return;
      }

      // An empty firstBatch means the $match matched nothing, i.e. count 0.
      const docs = response.cursor.firstBatch;
      callback(null, docs.length ? docs[0].n : 0);
    });
  }
}
|
||||
|
||||
module.exports = CountDocumentsOperation;
|
||||
102
node_modules/mongodb/lib/operations/create_collection.js
generated
vendored
Normal file
102
node_modules/mongodb/lib/operations/create_collection.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperation = require('./command');
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const loadCollection = require('../dynamic_loaders').loadCollection;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
|
||||
// Option keys that must not be copied verbatim into the `create` command
// document (driver-level, session and write/read-concern options that are
// handled through other channels).
const ILLEGAL_COMMAND_FIELDS = new Set([
  'w',
  'wtimeout',
  'j',
  'fsync',
  'autoIndexId',
  'strict',
  'serializeFunctions',
  'pkFactory',
  'raw',
  'readPreference',
  'session',
  'readConcern',
  'writeConcern'
]);
|
||||
|
||||
/**
 * Implements db.createCollection(): builds a `create` command from the
 * user's options and, in strict mode, first verifies the collection does
 * not already exist.
 */
class CreateCollectionOperation extends CommandOperation {
  /**
   * @param {Db} db The database to create the collection in
   * @param {string} name The collection name
   * @param {object} options Create options; non-command fields are filtered out
   */
  constructor(db, name, options) {
    super(db, options);
    this.name = name;
  }

  // Copies every user option into the command document, skipping functions
  // and driver-level fields (see ILLEGAL_COMMAND_FIELDS).
  _buildCommand() {
    const name = this.name;
    const options = this.options;

    const cmd = { create: name };
    for (let n in options) {
      if (
        options[n] != null &&
        typeof options[n] !== 'function' &&
        !ILLEGAL_COMMAND_FIELDS.has(n)
      ) {
        cmd[n] = options[n];
      }
    }

    return cmd;
  }

  /**
   * @param {Function} callback Called with (err) or (null, Collection)
   */
  execute(callback) {
    const db = this.db;
    const name = this.name;
    const options = this.options;
    const Collection = loadCollection();

    let listCollectionOptions = Object.assign({ nameOnly: true, strict: false }, options);
    listCollectionOptions = applyWriteConcern(listCollectionOptions, { db }, listCollectionOptions);

    // Shared completion handler: wraps the result in a Collection instance.
    function done(err) {
      if (err) {
        return callback(err);
      }

      try {
        callback(
          null,
          new Collection(db, db.s.topology, db.databaseName, name, db.s.pkFactory, options)
        );
      } catch (err) {
        callback(err);
      }
    }

    const strictMode = listCollectionOptions.strict;
    if (strictMode) {
      // Strict mode: error out if a collection with this name already exists.
      db.listCollections({ name }, listCollectionOptions)
        .setReadPreference(ReadPreference.PRIMARY)
        .toArray((err, collections) => {
          if (err) {
            return callback(err);
          }

          if (collections.length > 0) {
            return callback(
              new MongoError(`Collection ${name} already exists. Currently in strict mode.`)
            );
          }

          super.execute(done);
        });

      return;
    }

    // otherwise just execute the command
    super.execute(done);
  }
}
|
||||
|
||||
defineAspects(CreateCollectionOperation, Aspect.WRITE_OPERATION);
|
||||
module.exports = CreateCollectionOperation;
|
||||
137
node_modules/mongodb/lib/operations/create_indexes.js
generated
vendored
Normal file
137
node_modules/mongodb/lib/operations/create_indexes.js
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const MongoError = require('../core').MongoError;
|
||||
const parseIndexOptions = require('../utils').parseIndexOptions;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
|
||||
// Option names that are legal fields of an index specification document;
// only these are copied from user options into the generated index spec
// (see CreateIndexesOperation's constructor).
const VALID_INDEX_OPTIONS = new Set([
  'background',
  'unique',
  'name',
  'partialFilterExpression',
  'sparse',
  'expireAfterSeconds',
  'storageEngine',
  'collation',

  // text indexes
  'weights',
  'default_language',
  'language_override',
  'textIndexVersion',

  // 2d-sphere indexes
  '2dsphereIndexVersion',

  // 2d indexes
  'bits',
  'min',
  'max',

  // geoHaystack Indexes
  'bucketSize',

  // wildcard indexes
  'wildcardProjection'
]);
|
||||
|
||||
/**
 * Implements createIndex/createIndexes: normalizes the various createIndex
 * call styles into an array of index specs and runs the `createIndexes`
 * command against a selected server.
 */
class CreateIndexesOperation extends CommandOperationV2 {
  /**
   * @ignore
   */
  constructor(parent, collection, indexes, options) {
    super(parent, options);
    this.collection = collection;

    // createIndex can be called with a variety of styles:
    //   coll.createIndex('a');
    //   coll.createIndex({ a: 1 });
    //   coll.createIndex([['a', 1]]);
    // createIndexes is always called with an array of index spec objects
    if (!Array.isArray(indexes) || Array.isArray(indexes[0])) {
      this.onlyReturnNameOfCreatedIndex = true;
      // TODO: remove in v4 (breaking change); make createIndex return full response as createIndexes does

      const indexParameters = parseIndexOptions(indexes);
      // Generate the index name
      const name = typeof options.name === 'string' ? options.name : indexParameters.name;
      // Set up the index
      const indexSpec = { name, key: indexParameters.fieldHash };
      // merge valid index options into the index spec
      for (let optionName in options) {
        if (VALID_INDEX_OPTIONS.has(optionName)) {
          indexSpec[optionName] = options[optionName];
        }
      }
      this.indexes = [indexSpec];
      return;
    }

    this.indexes = indexes;
  }

  /**
   * @ignore
   */
  execute(server, callback) {
    const options = this.options;
    const indexes = this.indexes;

    const serverWireVersion = maxWireVersion(server);

    // Ensure we generate the correct name if the parameter is not set
    for (let i = 0; i < indexes.length; i++) {
      // Did the user pass in a collation, check if our write server supports it
      if (indexes[i].collation && serverWireVersion < 5) {
        callback(
          new MongoError(
            `Server ${server.name}, which reports wire version ${serverWireVersion}, does not support collation`
          )
        );
        return;
      }

      if (indexes[i].name == null) {
        const keys = [];

        // Default name is the `${field}_${direction}` pairs joined with '_'.
        for (let name in indexes[i].key) {
          keys.push(`${name}_${indexes[i].key[name]}`);
        }

        // Set the name
        indexes[i].name = keys.join('_');
      }
    }

    const cmd = { createIndexes: this.collection, indexes };

    if (options.commitQuorum != null) {
      if (serverWireVersion < 9) {
        callback(
          new MongoError('`commitQuorum` option for `createIndexes` not supported on servers < 4.4')
        );
        return;
      }
      cmd.commitQuorum = options.commitQuorum;
    }

    // collation is set on each index, it should not be defined at the root
    this.options.collation = undefined;

    super.executeCommand(server, cmd, (err, result) => {
      if (err) {
        callback(err);
        return;
      }

      // createIndex (single-index style) historically returns just the name.
      callback(null, this.onlyReturnNameOfCreatedIndex ? indexes[0].name : result);
    });
  }
}
|
||||
|
||||
defineAspects(CreateIndexesOperation, [Aspect.WRITE_OPERATION, Aspect.EXECUTE_WITH_SELECTION]);
|
||||
|
||||
module.exports = CreateIndexesOperation;
|
||||
167
node_modules/mongodb/lib/operations/cursor_ops.js
generated
vendored
Normal file
167
node_modules/mongodb/lib/operations/cursor_ops.js
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
'use strict';
|
||||
|
||||
const buildCountCommand = require('./collection_ops').buildCountCommand;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const push = Array.prototype.push;
|
||||
const CursorState = require('../core/cursor').CursorState;
|
||||
|
||||
/**
 * Get the count of documents for this cursor.
 *
 * @method
 * @param {Cursor} cursor The Cursor instance on which to count.
 * @param {boolean} [applySkipLimit=true] Specifies whether the count command apply limit and skip settings should be applied on the cursor or in the provided options.
 * @param {object} [opts] Optional settings. See Cursor.prototype.count for a list of options.
 * @param {Cursor~countResultCallback} [callback] The result callback.
 */
function count(cursor, applySkipLimit, opts, callback) {
  if (applySkipLimit) {
    if (typeof cursor.cursorSkip() === 'number') opts.skip = cursor.cursorSkip();
    if (typeof cursor.cursorLimit() === 'number') opts.limit = cursor.cursorLimit();
  }

  // Ensure we have the right read preference inheritance
  if (opts.readPreference) {
    cursor.setReadPreference(opts.readPreference);
  }

  // Inherit maxTimeMS from the cursor's command when not explicitly given.
  if (
    typeof opts.maxTimeMS !== 'number' &&
    cursor.cmd &&
    typeof cursor.cmd.maxTimeMS === 'number'
  ) {
    opts.maxTimeMS = cursor.cmd.maxTimeMS;
  }

  let options = {};
  options.skip = opts.skip;
  options.limit = opts.limit;
  options.hint = opts.hint;
  options.maxTimeMS = opts.maxTimeMS;

  // Command
  options.collectionName = cursor.namespace.collection;

  let command;
  try {
    command = buildCountCommand(cursor, cursor.cmd.query, options);
  } catch (err) {
    return callback(err);
  }

  // Set cursor server to the same as the topology
  cursor.server = cursor.topology.s.coreTopology;

  // Execute the command
  cursor.topology.command(
    cursor.namespace.withCollection('$cmd'),
    command,
    cursor.options,
    (err, result) => {
      // `n` is the matched-document count reported by the server.
      callback(err, result ? result.result.n : null);
    }
  );
}
|
||||
|
||||
/**
 * Iterates over all the documents for this cursor. See Cursor.prototype.each for more information.
 *
 * @method
 * @deprecated
 * @param {Cursor} cursor The Cursor instance on which to run.
 * @param {Cursor~resultCallback} callback The result callback, invoked once per
 *   document and finally with (null, null) when the cursor is exhausted.
 */
function each(cursor, callback) {
  if (!callback) throw MongoError.create({ message: 'callback is mandatory', driver: true });
  if (cursor.isNotified()) return;
  if (cursor.s.state === CursorState.CLOSED || cursor.isDead()) {
    return handleCallback(
      callback,
      MongoError.create({ message: 'Cursor is closed', driver: true })
    );
  }

  if (cursor.s.state === CursorState.INIT) {
    cursor.s.state = CursorState.OPEN;
  }

  // Define function to avoid global scope escape
  let fn = null;
  // Trampoline all the entries
  if (cursor.bufferedCount() > 0) {
    // Drain buffered documents synchronously via the loop() trampoline,
    // then recurse to fetch the next batch.
    while ((fn = loop(cursor, callback))) fn(cursor, callback);
    each(cursor, callback);
  } else {
    cursor.next((err, item) => {
      if (err) return handleCallback(callback, err);
      // A null item signals exhaustion: close without killing server cursors.
      if (item == null) {
        return cursor.close({ skipKillCursors: true }, () => handleCallback(callback, null, null));
      }

      // A callback explicitly returning false stops iteration.
      if (handleCallback(callback, null, item) === false) return;
      each(cursor, callback);
    });
  }
}
|
||||
|
||||
// Trampoline emptying the number of retrieved items
// without incurring a nextTick operation
function loop(cursor, callback) {
  // Nothing buffered: return undefined so the trampoline stops.
  if (cursor.bufferedCount() === 0) {
    return;
  }

  // Deliver the next buffered document, then hand back ourselves so the
  // caller keeps bouncing until the buffer drains.
  cursor._next(callback);

  return loop;
}
|
||||
|
||||
/**
 * Returns an array of documents. See Cursor.prototype.toArray for more information.
 *
 * @method
 * @param {Cursor} cursor The Cursor instance from which to get the next document.
 * @param {Cursor~toArrayResultCallback} [callback] The result callback, invoked
 *   with (err) or (null, documents).
 */
function toArray(cursor, callback) {
  const items = [];

  // Reset cursor
  cursor.rewind();
  cursor.s.state = CursorState.INIT;

  // Fetch all the documents
  const fetchDocs = () => {
    cursor._next((err, doc) => {
      if (err) {
        return handleCallback(callback, err);
      }

      // A null doc signals exhaustion: close without killing server cursors
      // and deliver the accumulated items.
      if (doc == null) {
        return cursor.close({ skipKillCursors: true }, () => handleCallback(callback, null, items));
      }

      // Add doc to items
      items.push(doc);

      // Get all buffered objects
      if (cursor.bufferedCount() > 0) {
        let docs = cursor.readBufferedDocuments(cursor.bufferedCount());

        // Transform the doc if transform method added
        if (cursor.s.transforms && typeof cursor.s.transforms.doc === 'function') {
          docs = docs.map(cursor.s.transforms.doc);
        }

        push.apply(items, docs);
      }

      // Attempt a fetch
      fetchDocs();
    });
  };

  fetchDocs();
}
|
||||
|
||||
// Cursor helper functions shared by the driver's cursor implementations
module.exports = { count, each, toArray };
|
||||
467
node_modules/mongodb/lib/operations/db_ops.js
generated
vendored
Normal file
467
node_modules/mongodb/lib/operations/db_ops.js
generated
vendored
Normal file
@@ -0,0 +1,467 @@
|
||||
'use strict';
|
||||
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const Code = require('../core').BSON.Code;
|
||||
const debugOptions = require('../utils').debugOptions;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const parseIndexOptions = require('../utils').parseIndexOptions;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const toError = require('../utils').toError;
|
||||
const CONSTANTS = require('../constants');
|
||||
const MongoDBNamespace = require('../utils').MongoDBNamespace;
|
||||
|
||||
// Option names that are safe/useful to include when logging command
// execution at debug level (see executeCommand's debugOptions call)
const debugFields = [
  'authSource',
  'w',
  'wtimeout',
  'j',
  'native_parser',
  'forceServerObjectId',
  'serializeFunctions',
  'raw',
  'promoteLongs',
  'promoteValues',
  'promoteBuffers',
  'bufferMaxEntries',
  'numberOfRetries',
  'retryMiliSeconds',
  'readPreference',
  'pkFactory',
  'parentDb',
  'promiseLibrary',
  'noListener'
];
|
||||
|
||||
/**
 * Creates an index on the db and collection.
 * @method
 * @param {Db} db The Db instance on which to create an index.
 * @param {string} name Name of the collection to create the index on.
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {object} [options] Optional settings. See Db.prototype.createIndex for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback
 */
function createIndex(db, name, fieldOrSpec, options, callback) {
  // Get the write concern options; index creation is routed to the primary
  let finalOptions = Object.assign({}, { readPreference: ReadPreference.PRIMARY }, options);
  finalOptions = applyWriteConcern(finalOptions, { db }, options);

  // Ensure we have a callback when a write concern is requested — there would
  // otherwise be no way to surface a write-concern failure
  if (finalOptions.writeConcern && typeof callback !== 'function') {
    throw MongoError.create({
      message: 'Cannot use a writeConcern without a provided callback',
      driver: true
    });
  }

  // Did the user destroy the topology
  if (db.serverConfig && db.serverConfig.isDestroyed())
    return callback(new MongoError('topology was destroyed'));

  // Attempt to run using createIndexes command
  createIndexUsingCreateIndexes(db, name, fieldOrSpec, finalOptions, (err, result) => {
    if (err == null) return handleCallback(callback, err, result);

    /**
     * The following errors mean that the server recognized `createIndex` as a command so we don't need to fallback to an insert:
     * 67 = 'CannotCreateIndex' (malformed index options)
     * 85 = 'IndexOptionsConflict' (index already exists with different options)
     * 86 = 'IndexKeySpecsConflict' (index already exists with the same name)
     * 11000 = 'DuplicateKey' (couldn't build unique index because of dupes)
     * 11600 = 'InterruptedAtShutdown' (interrupted at shutdown)
     * 197 = 'InvalidIndexSpecificationOption' (`_id` with `background: true`)
     */
    if (
      err.code === 67 ||
      err.code === 11000 ||
      err.code === 85 ||
      err.code === 86 ||
      err.code === 11600 ||
      err.code === 197
    ) {
      return handleCallback(callback, err, result);
    }

    // Fallback path: insert the index spec document directly into the
    // system indexes collection — presumably for servers that predate the
    // createIndexes command; TODO confirm the supported server range.
    // Create command
    const doc = createCreateIndexCommand(db, name, fieldOrSpec, options);
    // Set no key checking — index key specs may legitimately contain dotted names
    finalOptions.checkKeys = false;
    // Insert document
    db.s.topology.insert(
      db.s.namespace.withCollection(CONSTANTS.SYSTEM_INDEX_COLLECTION),
      doc,
      finalOptions,
      (err, result) => {
        if (callback == null) return;
        if (err) return handleCallback(callback, err);
        if (result == null) return handleCallback(callback, null, null);
        if (result.result.writeErrors)
          return handleCallback(callback, MongoError.create(result.result.writeErrors[0]), null);
        // Resolve with the index name for backwards compatibility
        handleCallback(callback, null, doc.name);
      }
    );
  });
}
|
||||
|
||||
// Build an error listener that re-broadcasts topology events on the db
// (and each of its child dbs) when anyone is subscribed to the event.
function createListener(db, e, object) {
  return function listener(err) {
    // Nothing to do unless someone is actually listening
    if (object.listeners(e).length === 0) return;

    object.emit(e, err, db);

    // Fan the event out to every associated child db as well
    db.s.children.forEach(child => child.emit(e, err, child));
  };
}
|
||||
|
||||
/**
 * Ensures that an index exists. If it does not, creates it.
 *
 * @method
 * @param {Db} db The Db instance on which to ensure the index.
 * @param {string} name The index name
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {object} [options] Optional settings. See Db.prototype.ensureIndex for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback
 */
function ensureIndex(db, name, fieldOrSpec, options, callback) {
  // Write-concern options for the lookup/create round trip
  const finalOptions = applyWriteConcern({}, { db }, options);

  // Derive the target index name from the create-index selector
  const selector = createCreateIndexCommand(db, name, fieldOrSpec, options);
  const indexName = selector.name;

  // Bail out when the topology has already been shut down
  if (db.serverConfig && db.serverConfig.isDestroyed()) {
    return callback(new MongoError('topology was destroyed'));
  }

  // Index lookups always go to the primary
  finalOptions.readPreference = ReadPreference.PRIMARY;

  // Only create the index when it is not already present
  indexInformation(db, name, finalOptions, (err, info) => {
    // Error code 26 means the namespace does not exist yet, which is fine —
    // any other error is surfaced to the caller
    if (err != null && err.code !== 26) return handleCallback(callback, err, null);

    if (info == null || !info[indexName]) {
      createIndex(db, name, fieldOrSpec, options, callback);
      return;
    }

    // Already exists: report its name without touching the server again
    if (typeof callback === 'function') handleCallback(callback, null, indexName);
  });
}
|
||||
|
||||
/**
 * Evaluate JavaScript on the server
 *
 * @method
 * @param {Db} db The Db instance.
 * @param {Code} code JavaScript to execute on server.
 * @param {(object|array)} parameters The parameters for the call.
 * @param {object} [options] Optional settings. See Db.prototype.eval for a list of options.
 * @param {Db~resultCallback} [callback] The results callback
 * @deprecated Eval is deprecated on MongoDB 3.2 and forward
 */
function evaluate(db, code, parameters, options, callback) {
  // Bail out when the topology has already been shut down
  if (db.serverConfig && db.serverConfig.isDestroyed()) {
    return callback(new MongoError('topology was destroyed'));
  }

  // Wrap plain values in a BSON Code instance
  const finalCode = code && code._bsontype === 'Code' ? code : new Code(code);

  // Normalize parameters to an array; a function here is the shifted
  // callback argument, not a server-side parameter
  let finalParameters = [];
  if (parameters != null && typeof parameters !== 'function') {
    finalParameters = Array.isArray(parameters) ? parameters : [parameters];
  }

  // Build the $eval command document
  const cmd = { $eval: finalCode, args: finalParameters };
  if (options['nolock']) {
    cmd['nolock'] = options['nolock'];
  }

  // $eval must run against the primary
  options.readPreference = new ReadPreference(ReadPreference.PRIMARY);

  // Execute and unwrap the return value
  executeCommand(db, cmd, options, (err, result) => {
    if (err) return handleCallback(callback, err, null);
    if (result && result.ok === 1) return handleCallback(callback, null, result.retval);
    if (result) {
      return handleCallback(
        callback,
        MongoError.create({ message: `eval failed: ${result.errmsg}`, driver: true }),
        null
      );
    }
    handleCallback(callback, err, result);
  });
}
|
||||
|
||||
/**
 * Execute a command
 *
 * @method
 * @param {Db} db The Db instance on which to execute the command.
 * @param {object} command The command hash
 * @param {object} [options] Optional settings. See Db.prototype.command for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback
 */
function executeCommand(db, command, options, callback) {
  // Refuse to run once the topology has been destroyed
  if (db.serverConfig && db.serverConfig.isDestroyed()) {
    return callback(new MongoError('topology was destroyed'));
  }

  // The database to execute against; authdb/dbName may override the instance's own
  const dbName = options.dbName || options.authdb || db.databaseName;

  // Resolve the effective read preference for this command
  options.readPreference = ReadPreference.resolve(db, options);

  // Trace the outgoing command when debug logging is enabled
  if (db.s.logger.isDebug()) {
    db.s.logger.debug(
      `executing command ${JSON.stringify(
        command
      )} against ${dbName}.$cmd with options [${JSON.stringify(
        debugOptions(debugFields, options)
      )}]`
    );
  }

  // Run against <db>.$cmd, unwrapping the reply unless `full` was requested
  db.s.topology.command(db.s.namespace.withCollection('$cmd'), command, options, (err, result) => {
    if (err) return handleCallback(callback, err);
    handleCallback(callback, null, options.full ? result : result.result);
  });
}
|
||||
|
||||
/**
 * Runs a command on the database as admin.
 *
 * @method
 * @param {Db} db The Db instance on which to execute the command.
 * @param {object} command The command hash
 * @param {object} [options] Optional settings. See Db.prototype.executeDbAdminCommand for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback
 */
function executeDbAdminCommand(db, command, options, callback) {
  // Admin commands always target admin.$cmd regardless of this db's name
  const adminNamespace = new MongoDBNamespace('admin', '$cmd');

  db.s.topology.command(adminNamespace, command, options, (err, result) => {
    // NOTE(review): the destroyed-topology check runs after the command
    // round trip, inside the reply callback — confirm this is intentional
    if (db.serverConfig && db.serverConfig.isDestroyed()) {
      return callback(new MongoError('topology was destroyed'));
    }

    if (err) return handleCallback(callback, err);
    handleCallback(callback, null, result.result);
  });
}
|
||||
|
||||
/**
 * Retrieves this collections index info.
 *
 * @method
 * @param {Db} db The Db instance on which to retrieve the index info.
 * @param {string} name The name of the collection.
 * @param {object} [options] Optional settings. See Db.prototype.indexInformation for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback
 */
function indexInformation(db, name, options, callback) {
  // `full` requests the raw index documents instead of a summary
  const full = options['full'] == null ? false : options['full'];

  // Bail out when the topology has already been shut down
  if (db.serverConfig && db.serverConfig.isDestroyed()) {
    return callback(new MongoError('topology was destroyed'));
  }

  // Collapse index documents into { indexName: [[field, direction], ...] }
  const summarize = indexes => {
    const info = {};
    for (const index of indexes) {
      info[index.name] = [];
      for (const field in index.key) {
        info[index.name].push([field, index.key[field]]);
      }
    }
    return info;
  };

  // List the collection's indexes and shape the reply
  db.collection(name)
    .listIndexes(options)
    .toArray((err, indexes) => {
      if (err) return callback(toError(err));
      if (!Array.isArray(indexes)) return handleCallback(callback, null, []);
      handleCallback(callback, null, full ? indexes : summarize(indexes));
    });
}
|
||||
|
||||
/**
 * Retrieve the current profiling information for MongoDB
 *
 * @method
 * @param {Db} db The Db instance on which to retrieve the profiling info.
 * @param {Object} [options] Optional settings. See Db.protoype.profilingInfo for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback.
 * @deprecated Query the system.profile collection directly.
 */
function profilingInfo(db, options, callback) {
  // Synchronous failures (e.g. invalid options) are routed to the callback
  try {
    const cursor = db.collection('system.profile').find({}, options);
    cursor.toArray(callback);
  } catch (err) {
    return callback(err, null);
  }
}
|
||||
|
||||
// Validate a database name: must be a non-empty string free of the
// characters MongoDB forbids; '$external' is exempt (auth pseudo-db).
function validateDatabaseName(databaseName) {
  if (typeof databaseName !== 'string')
    throw MongoError.create({ message: 'database name must be a string', driver: true });
  if (databaseName.length === 0)
    throw MongoError.create({ message: 'database name cannot be the empty string', driver: true });
  if (databaseName === '$external') return;

  // Characters that may not appear anywhere in a database name
  for (const invalidChar of [' ', '.', '$', '/', '\\']) {
    if (databaseName.includes(invalidChar))
      throw MongoError.create({
        message: `database names cannot contain the character '${invalidChar}'`,
        driver: true
      });
  }
}
|
||||
|
||||
/**
 * Create the command object for Db.prototype.createIndex.
 *
 * @param {Db} db The Db instance on which to create the command.
 * @param {string} name Name of the collection to create the index on.
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {Object} [options] Optional settings. See Db.prototype.createIndex for a list of options.
 * @return {Object} The insert command object.
 */
function createCreateIndexCommand(db, name, fieldOrSpec, options) {
  const indexParameters = parseIndexOptions(fieldOrSpec);

  // Prefer an explicitly supplied name, otherwise the generated one
  const indexName = typeof options.name === 'string' ? options.name : indexParameters.name;

  const selector = {
    ns: db.s.namespace.withCollection(name).toString(),
    key: indexParameters.fieldHash,
    name: indexName
  };

  // Legacy API shape: a non-object `options` value is the `unique` flag itself
  const finalUnique = options == null || 'object' === typeof options ? false : options;
  options = options == null || typeof options === 'boolean' ? {} : options;

  // Copy every option that does not collide with the selector's own fields
  const reserved = Object.keys(selector);
  for (const optionName of Object.keys(options)) {
    if (reserved.indexOf(optionName) === -1) {
      selector[optionName] = options[optionName];
    }
  }

  if (selector['unique'] == null) selector['unique'] = finalUnique;

  // Strip write-concern and session noise from the command document
  for (const key of ['w', 'wtimeout', 'j', 'fsync', 'readPreference', 'session']) {
    delete selector[key];
  }

  return selector;
}
|
||||
|
||||
/**
 * Create index using the createIndexes command.
 *
 * @param {Db} db The Db instance on which to execute the command.
 * @param {string} name Name of the collection to create the index on.
 * @param {(string|object)} fieldOrSpec Defines the index.
 * @param {Object} [options] Optional settings. See Db.prototype.createIndex for a list of options.
 * @param {Db~resultCallback} [callback] The command result callback.
 */
function createIndexUsingCreateIndexes(db, name, fieldOrSpec, options, callback) {
  // Build the index
  const indexParameters = parseIndexOptions(fieldOrSpec);
  // Generate the index name (explicit options.name wins over the derived one)
  const indexName = typeof options.name === 'string' ? options.name : indexParameters.name;
  // Set up the index
  const indexes = [{ name: indexName, key: indexParameters.fieldHash }];
  // merge all the options, skipping command-level and write-concern keys
  const keysToOmit = Object.keys(indexes[0]).concat([
    'writeConcern',
    'w',
    'wtimeout',
    'j',
    'fsync',
    'readPreference',
    'session'
  ]);

  for (let optionName in options) {
    if (keysToOmit.indexOf(optionName) === -1) {
      indexes[0][optionName] = options[optionName];
    }
  }

  // Get capabilities
  const capabilities = db.s.topology.capabilities();

  // Did the user pass in a collation, check if our write server supports it
  if (indexes[0].collation && capabilities && !capabilities.commandsTakeCollation) {
    // Create a new error
    const error = new MongoError('server/primary/mongos does not support collation');
    error.code = 67; // 67 = CannotCreateIndex, matching the server's own error code
    // Return the error
    return callback(error);
  }

  // Create command, apply write concern to command
  const cmd = applyWriteConcern({ createIndexes: name, indexes }, { db }, options);

  // ReadPreference primary — index creation must go to the primary
  options.readPreference = ReadPreference.PRIMARY;

  // Build the command
  executeCommand(db, cmd, options, (err, result) => {
    if (err) return handleCallback(callback, err, null);
    if (result.ok === 0) return handleCallback(callback, toError(result), null);
    // Return the indexName for backward compatibility
    handleCallback(callback, null, indexName);
  });
}
|
||||
|
||||
// Public db-level operation helpers
module.exports = {
  createListener,
  createIndex,
  ensureIndex,
  evaluate,
  executeCommand,
  executeDbAdminCommand,
  indexInformation,
  profilingInfo,
  validateDatabaseName
};
|
||||
25
node_modules/mongodb/lib/operations/delete_many.js
generated
vendored
Normal file
25
node_modules/mongodb/lib/operations/delete_many.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const deleteCallback = require('./common_functions').deleteCallback;
|
||||
const removeDocuments = require('./common_functions').removeDocuments;
|
||||
|
||||
/**
 * Removes every document matching the filter from the collection.
 */
class DeleteManyOperation extends OperationBase {
  constructor(collection, filter, options) {
    super(options);

    this.collection = collection;
    this.filter = filter;
  }

  execute(callback) {
    // deleteMany always removes all matching documents
    this.options.single = false;

    removeDocuments(this.collection, this.filter, this.options, (err, r) =>
      deleteCallback(err, r, callback)
    );
  }
}

module.exports = DeleteManyOperation;
|
||||
25
node_modules/mongodb/lib/operations/delete_one.js
generated
vendored
Normal file
25
node_modules/mongodb/lib/operations/delete_one.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const deleteCallback = require('./common_functions').deleteCallback;
|
||||
const removeDocuments = require('./common_functions').removeDocuments;
|
||||
|
||||
/**
 * Removes at most one document matching the filter from the collection.
 */
class DeleteOneOperation extends OperationBase {
  constructor(collection, filter, options) {
    super(options);

    this.collection = collection;
    this.filter = filter;
  }

  execute(callback) {
    // deleteOne stops after the first matching document
    this.options.single = true;

    removeDocuments(this.collection, this.filter, this.options, (err, r) =>
      deleteCallback(err, r, callback)
    );
  }
}

module.exports = DeleteOneOperation;
|
||||
85
node_modules/mongodb/lib/operations/distinct.js
generated
vendored
Normal file
85
node_modules/mongodb/lib/operations/distinct.js
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const decorateWithCollation = require('../utils').decorateWithCollation;
|
||||
const decorateWithReadConcern = require('../utils').decorateWithReadConcern;
|
||||
|
||||
/**
 * Return a list of distinct values for the given key across a collection.
 *
 * @class
 * @property {Collection} collection Collection instance.
 * @property {string} key Field of the document to find distinct values for.
 * @property {object} query The query for filtering the set of documents to which we apply the distinct filter.
 * @property {object} [options] Optional settings. See Collection.prototype.distinct for a list of options.
 */
class DistinctOperation extends CommandOperationV2 {
  /**
   * Construct a Distinct operation.
   *
   * @param {Collection} collection Collection instance.
   * @param {string} key Field of the document to find distinct values for.
   * @param {object} query The query for filtering the set of documents to which we apply the distinct filter.
   * @param {object} [options] Optional settings. See Collection.prototype.distinct for a list of options.
   */
  constructor(collection, key, query, options) {
    super(collection, options);

    this.collection = collection;
    this.key = key;
    this.query = query;
  }

  /**
   * Execute the operation.
   *
   * @param {Collection~resultCallback} [callback] The command result callback
   */
  execute(server, callback) {
    const coll = this.collection;
    const options = this.options;

    // Base distinct command (shorthand: key/query from this operation)
    const cmd = {
      distinct: coll.collectionName,
      key: this.key,
      query: this.query
    };

    // Honor a caller-supplied server-side time limit
    if (typeof options.maxTimeMS === 'number') {
      cmd.maxTimeMS = options.maxTimeMS;
    }

    // Attach read concern when configured
    decorateWithReadConcern(cmd, coll, options);

    // Collation validation may throw; surface it through the callback
    try {
      decorateWithCollation(cmd, coll, options);
    } catch (err) {
      return callback(err, null);
    }

    // `full` returns the raw reply; otherwise only the distinct values array
    super.executeCommand(server, cmd, (err, result) => {
      if (err) return callback(err);
      callback(null, options.full ? result : result.values);
    });
  }
}

defineAspects(DistinctOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = DistinctOperation;
|
||||
53
node_modules/mongodb/lib/operations/drop.js
generated
vendored
Normal file
53
node_modules/mongodb/lib/operations/drop.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const CommandOperation = require('./command');
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
|
||||
// Base operation for drop/dropDatabase commands; subclasses supply the
// actual command document via _buildCommand().
class DropOperation extends CommandOperation {
  constructor(db, options) {
    // NOTE(review): db.s.options is merged last, so it can override keys in
    // the caller's options; the session re-assignment below restores the
    // caller's session after that merge — confirm the override of other
    // keys is intentional.
    const finalOptions = Object.assign({}, options, db.s.options);

    if (options.session) {
      finalOptions.session = options.session;
    }

    super(db, finalOptions);
  }

  execute(callback) {
    // Collapse the server reply into a simple boolean drop result
    super.execute((err, result) => {
      if (err) return handleCallback(callback, err);
      if (result.ok) return handleCallback(callback, null, true);
      handleCallback(callback, null, false);
    });
  }
}

defineAspects(DropOperation, Aspect.WRITE_OPERATION);
|
||||
|
||||
// Drops a single collection via the `drop` command.
class DropCollectionOperation extends DropOperation {
  constructor(db, name, options) {
    super(db, options);

    this.name = name;
    // Fully-qualified namespace, e.g. "mydb.mycollection"
    this.namespace = `${db.namespace}.${name}`;
  }

  _buildCommand() {
    return { drop: this.name };
  }
}
|
||||
|
||||
// Drops the entire database via the `dropDatabase` command.
class DropDatabaseOperation extends DropOperation {
  _buildCommand() {
    return { dropDatabase: 1 };
  }
}

module.exports = {
  DropOperation,
  DropCollectionOperation,
  DropDatabaseOperation
};
|
||||
42
node_modules/mongodb/lib/operations/drop_index.js
generated
vendored
Normal file
42
node_modules/mongodb/lib/operations/drop_index.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperation = require('./command');
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
|
||||
/**
 * Drops a single index (or all indexes when indexName is '*') from a collection
 * via the `dropIndexes` command.
 */
class DropIndexOperation extends CommandOperation {
  constructor(collection, indexName, options) {
    super(collection.s.db, options, collection);

    this.collection = collection;
    this.indexName = indexName;
  }

  _buildCommand() {
    // Base dropIndexes command targeting this.indexName
    const command = {
      dropIndexes: this.collection.collectionName,
      index: this.indexName
    };

    // Decorate command with writeConcern if supported
    return applyWriteConcern(
      command,
      { db: this.collection.s.db, collection: this.collection },
      this.options
    );
  }

  execute(callback) {
    // Execute command; a callback is optional for fire-and-forget drops
    super.execute((err, result) => {
      if (typeof callback !== 'function') return;
      if (err) return handleCallback(callback, err, null);
      handleCallback(callback, null, result);
    });
  }
}

defineAspects(DropIndexOperation, Aspect.WRITE_OPERATION);

module.exports = DropIndexOperation;
|
||||
23
node_modules/mongodb/lib/operations/drop_indexes.js
generated
vendored
Normal file
23
node_modules/mongodb/lib/operations/drop_indexes.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const DropIndexOperation = require('./drop_index');
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
|
||||
// Drops every index on a collection by delegating to DropIndexOperation
// with the special '*' index name.
class DropIndexesOperation extends DropIndexOperation {
  constructor(collection, options) {
    super(collection, '*', options);
  }

  execute(callback) {
    // Reduce the reply to a simple success boolean
    super.execute(err => {
      if (err) return handleCallback(callback, err, false);
      handleCallback(callback, null, true);
    });
  }
}

defineAspects(DropIndexesOperation, Aspect.WRITE_OPERATION);

module.exports = DropIndexesOperation;
|
||||
58
node_modules/mongodb/lib/operations/estimated_document_count.js
generated
vendored
Normal file
58
node_modules/mongodb/lib/operations/estimated_document_count.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
|
||||
/**
 * Estimates the document count of a collection using the `count` command,
 * optionally filtered by a query and bounded by skip/limit/hint options.
 */
class EstimatedDocumentCountOperation extends CommandOperationV2 {
  constructor(collection, query, options) {
    // Support the (collection, options) call shape with no query argument
    if (typeof options === 'undefined') {
      options = query;
      query = undefined;
    }

    super(collection, options);
    this.collectionName = collection.s.namespace.collection;
    if (query) {
      this.query = query;
    }
  }

  execute(server, callback) {
    const options = this.options;

    // Build the count command, copying over only the supported options
    const cmd = { count: this.collectionName };
    if (this.query) cmd.query = this.query;
    if (typeof options.skip === 'number') cmd.skip = options.skip;
    if (typeof options.limit === 'number') cmd.limit = options.limit;
    if (options.hint) cmd.hint = options.hint;

    // Reply's `n` field carries the estimated count
    super.executeCommand(server, cmd, (err, response) => {
      if (err) return callback(err);
      callback(null, response.n);
    });
  }
}

defineAspects(EstimatedDocumentCountOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = EstimatedDocumentCountOperation;
|
||||
34
node_modules/mongodb/lib/operations/execute_db_admin_command.js
generated
vendored
Normal file
34
node_modules/mongodb/lib/operations/execute_db_admin_command.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
const MongoDBNamespace = require('../utils').MongoDBNamespace;
|
||||
|
||||
/**
 * Runs an arbitrary command against the admin database ("admin.$cmd").
 */
class ExecuteDbAdminCommandOperation extends OperationBase {
  constructor(db, selector, options) {
    super(options);

    this.db = db;
    this.selector = selector;
  }

  execute(callback) {
    const db = this.db;

    // Admin commands always target admin.$cmd regardless of this db's name
    const adminNamespace = new MongoDBNamespace('admin', '$cmd');
    db.s.topology.command(adminNamespace, this.selector, this.options, (err, result) => {
      // NOTE(review): the destroyed-topology check runs after the command
      // round trip, inside the reply callback — confirm this is intentional
      if (db.serverConfig && db.serverConfig.isDestroyed()) {
        return callback(new MongoError('topology was destroyed'));
      }

      if (err) return handleCallback(callback, err);
      handleCallback(callback, null, result.result);
    });
  }
}

module.exports = ExecuteDbAdminCommandOperation;
|
||||
186
node_modules/mongodb/lib/operations/execute_operation.js
generated
vendored
Normal file
186
node_modules/mongodb/lib/operations/execute_operation.js
generated
vendored
Normal file
@@ -0,0 +1,186 @@
|
||||
'use strict';
|
||||
|
||||
const MongoError = require('../core/error').MongoError;
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const ReadPreference = require('../core/topologies/read_preference');
|
||||
const isRetryableError = require('../core/error').isRetryableError;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const isUnifiedTopology = require('../core/utils').isUnifiedTopology;
|
||||
|
||||
/**
|
||||
* Executes the given operation with provided arguments.
|
||||
*
|
||||
* This method reduces large amounts of duplication in the entire codebase by providing
|
||||
* a single point for determining whether callbacks or promises should be used. Additionally
|
||||
* it allows for a single point of entry to provide features such as implicit sessions, which
|
||||
* are required by the Driver Sessions specification in the event that a ClientSession is
|
||||
* not provided
|
||||
*
|
||||
* @param {object} topology The topology to execute this operation on
|
||||
* @param {Operation} operation The operation to execute
|
||||
* @param {function} callback The command result callback
|
||||
*/
|
||||
function executeOperation(topology, operation, callback) {
  // Both arguments are mandatory; fail loudly rather than deep inside a callback.
  if (topology == null) {
    throw new TypeError('This method requires a valid topology instance');
  }

  if (!(operation instanceof OperationBase)) {
    throw new TypeError('This method requires a valid operation instance');
  }

  // Unified topologies may not yet know whether sessions are supported; force
  // a server selection first and then re-enter this function.
  if (isUnifiedTopology(topology) && topology.shouldCheckForSessionSupport()) {
    return selectServerForSessionSupport(topology, operation, callback);
  }

  const Promise = topology.s.promiseLibrary;

  // The driver sessions spec mandates that we implicitly create sessions for operations
  // that are not explicitly provided with a session.
  let session, owner;
  if (topology.hasSessionSupport()) {
    if (operation.session == null) {
      // `owner` tags the implicit session so we only end sessions we created.
      owner = Symbol();
      session = topology.startSession({ owner });
      operation.session = session;
    } else if (operation.session.hasEnded) {
      throw new MongoError('Use of expired sessions is not permitted');
    }
  }

  // Promise mode: when no callback was supplied, synthesize one that settles
  // the returned promise.
  let result;
  if (typeof callback !== 'function') {
    result = new Promise((resolve, reject) => {
      callback = (err, res) => {
        if (err) return reject(err);
        resolve(res);
      };
    });
  }

  // Wraps the user callback so any implicit session we started above is ended
  // (and detached from the operation) before the result is delivered.
  function executeCallback(err, result) {
    if (session && session.owner === owner) {
      session.endSession();
      if (operation.session === session) {
        operation.clearSession();
      }
    }

    callback(err, result);
  }

  try {
    if (operation.hasAspect(Aspect.EXECUTE_WITH_SELECTION)) {
      // Operations flagged EXECUTE_WITH_SELECTION get an explicit server
      // selection step (and possibly a retryable-read retry).
      executeWithServerSelection(topology, operation, executeCallback);
    } else {
      operation.execute(executeCallback);
    }
  } catch (e) {
    // Synchronous failure: still clean up any implicit session before rethrowing.
    if (session && session.owner === owner) {
      session.endSession();
      if (operation.session === session) {
        operation.clearSession();
      }
    }

    throw e;
  }

  // `undefined` in callback mode, a promise otherwise.
  return result;
}
|
||||
|
||||
/**
 * Whether the given server speaks a wire protocol new enough (>= 6, i.e.
 * MongoDB 3.6+) to support retryable reads.
 *
 * @param {Server} server The server to inspect.
 * @returns {boolean}
 */
function supportsRetryableReads(server) {
  const wireVersion = maxWireVersion(server);
  return wireVersion >= 6;
}
|
||||
|
||||
// Selects a server for the operation and executes it there, transparently
// retrying reads once on a freshly selected server when permitted.
function executeWithServerSelection(topology, operation, callback) {
  const readPreference = operation.readPreference || ReadPreference.primary;
  const inTransaction = operation.session && operation.session.inTransaction();

  // Transactions pin reads to the primary; anything else is a user error.
  if (inTransaction && !readPreference.equals(ReadPreference.primary)) {
    callback(
      new MongoError(
        `Read preference in a transaction must be primary, not: ${readPreference.mode}`
      )
    );

    return;
  }

  const serverSelectionOptions = {
    readPreference,
    session: operation.session
  };

  // Callback used when a single retry is allowed: on a retryable error, pick
  // a new server and run the operation once more (no further retries).
  function callbackWithRetry(err, result) {
    if (err == null) {
      return callback(null, result);
    }

    if (!isRetryableError(err)) {
      return callback(err);
    }

    // select a new server, and attempt to retry the operation
    topology.selectServer(serverSelectionOptions, (err, server) => {
      // If the newly selected server is too old for retryable reads, give up
      // (note: `err` may be null here, surfacing a null error with no result).
      if (err || !supportsRetryableReads(server)) {
        callback(err, null);
        return;
      }

      operation.execute(server, callback);
    });
  }

  // select a server, and execute the operation against it
  topology.selectServer(serverSelectionOptions, (err, server) => {
    if (err) {
      callback(err, null);
      return;
    }

    // Retry only when enabled, a session exists, we are outside a transaction,
    // the server is new enough, and the operation itself opts in.
    const shouldRetryReads =
      topology.s.options.retryReads !== false &&
      operation.session &&
      !inTransaction &&
      supportsRetryableReads(server) &&
      operation.canRetryRead;

    if (operation.hasAspect(Aspect.RETRYABLE) && shouldRetryReads) {
      operation.execute(server, callbackWithRetry);
      return;
    }

    operation.execute(server, callback);
  });
}
|
||||
|
||||
// TODO: This is only supported for unified topology, it should go away once
|
||||
// we remove support for legacy topology types.
|
||||
/**
 * Forces a server selection (with `primaryPreferred`) so that session support
 * can be determined on a unified topology, then re-enters `executeOperation`.
 *
 * Returns a promise when no callback was supplied, mirroring the calling
 * convention of `executeOperation` itself.
 *
 * @param {Topology} topology The unified topology to select against.
 * @param {Operation} operation The operation to execute afterwards.
 * @param {function} [callback] The command result callback.
 */
function selectServerForSessionSupport(topology, operation, callback) {
  const Promise = topology.s.promiseLibrary;

  let promisedResult;
  if (typeof callback !== 'function') {
    promisedResult = new Promise((resolve, reject) => {
      callback = (err, res) => (err ? reject(err) : resolve(res));
    });
  }

  topology.selectServer(ReadPreference.primaryPreferred, err => {
    if (err) {
      return callback(err);
    }

    // Session support is now known; run the operation for real.
    executeOperation(topology, operation, callback);
  });

  return promisedResult;
}

module.exports = executeOperation;
|
||||
40
node_modules/mongodb/lib/operations/find.js
generated
vendored
Normal file
40
node_modules/mongodb/lib/operations/find.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const MongoError = require('../core/error').MongoError;
|
||||
|
||||
/**
 * Represents a `find` query executed directly against a selected server.
 */
class FindOperation extends OperationBase {
  /**
   * @param {Collection} collection Collection used to resolve the read preference.
   * @param {MongoDBNamespace} ns The namespace to query.
   * @param {object} command The find command / query document.
   * @param {object} [options] Optional settings forwarded to the server query.
   */
  constructor(collection, ns, command, options) {
    super(options);

    this.ns = ns;
    this.cmd = command;
    // Resolve the effective read preference from the collection hierarchy.
    this.readPreference = ReadPreference.resolve(collection, this.options);
  }

  /**
   * Run the query against the given server.
   *
   * @param {Server} server The server to query.
   * @param {function} callback The command result callback.
   */
  execute(server, callback) {
    // copied from `CommandOperationV2`, to be subclassed in the future
    this.server = server;

    const wantsAllowDiskUse = typeof this.cmd.allowDiskUse !== 'undefined';
    if (wantsAllowDiskUse && maxWireVersion(server) < 4) {
      return callback(new MongoError('The `allowDiskUse` option is not supported on MongoDB < 3.2'));
    }

    // TODO: use `MongoDBNamespace` through and through
    const cursorState = this.cursorState || {};
    server.query(this.ns.toString(), this.cmd, cursorState, this.options, callback);
  }
}
|
||||
|
||||
// find is a retryable read executed against an explicitly selected server.
defineAspects(FindOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = FindOperation;
|
||||
115
node_modules/mongodb/lib/operations/find_and_modify.js
generated
vendored
Normal file
115
node_modules/mongodb/lib/operations/find_and_modify.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const applyRetryableWrites = require('../utils').applyRetryableWrites;
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const decorateWithCollation = require('../utils').decorateWithCollation;
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const formattedOrderClause = require('../utils').formattedOrderClause;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const MongoError = require('../error').MongoError;
|
||||
|
||||
/**
 * Base operation for the `findAndModify` command. Subclasses
 * (findOneAndDelete / findOneAndReplace / findOneAndUpdate) pre-shape the
 * options before delegating here.
 */
class FindAndModifyOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection to operate on.
   * @param {object} query Selection criteria for the document to modify.
   * @param {object} sort Sort order applied before picking the document.
   * @param {object} doc The update/replacement document (ignored for remove).
   * @param {object} [options] Optional settings (new, remove, upsert, projection, ...).
   */
  constructor(collection, query, sort, doc, options) {
    super(options);

    this.collection = collection;
    this.query = query;
    this.sort = sort;
    this.doc = doc;
  }

  /**
   * Build and execute the findAndModify command.
   *
   * @param {function} callback Invoked with `(err, commandResult)`.
   */
  execute(callback) {
    const coll = this.collection;
    const query = this.query;
    const sort = formattedOrderClause(this.sort);
    const doc = this.doc;
    let options = this.options;

    // Create findAndModify command object
    const queryObject = {
      findAndModify: coll.collectionName,
      query: query
    };

    if (sort) {
      queryObject.sort = sort;
    }

    // Normalize the tri-state option flags to strict booleans for the command.
    queryObject.new = options.new ? true : false;
    queryObject.remove = options.remove ? true : false;
    queryObject.upsert = options.upsert ? true : false;

    // `projection` is the crud-spec name; `fields` is the legacy alias.
    const projection = options.projection || options.fields;

    if (projection) {
      queryObject.fields = projection;
    }

    if (options.arrayFilters) {
      queryObject.arrayFilters = options.arrayFilters;
    }

    // A remove never carries an update document.
    if (doc && !options.remove) {
      queryObject.update = doc;
    }

    if (options.maxTimeMS) queryObject.maxTimeMS = options.maxTimeMS;

    // Either use override on the function, or go back to default on either the collection
    // level or db
    // NOTE(review): this writes through to the shared `this.options` object
    // before the copies below — presumably intentional, but verify callers
    // do not reuse the options object.
    options.serializeFunctions = options.serializeFunctions || coll.s.serializeFunctions;

    // No check on the documents
    options.checkKeys = false;

    // Final options for retryable writes and write concern
    options = applyRetryableWrites(options, coll.s.db);
    options = applyWriteConcern(options, { db: coll.s.db, collection: coll }, options);

    // Decorate the findAndModify command with the write Concern
    if (options.writeConcern) {
      queryObject.writeConcern = options.writeConcern;
    }

    // Have we specified bypassDocumentValidation
    if (options.bypassDocumentValidation === true) {
      queryObject.bypassDocumentValidation = options.bypassDocumentValidation;
    }

    // findAndModify is a write; it must always run on the primary.
    options.readPreference = ReadPreference.primary;

    // Have we specified collation
    try {
      decorateWithCollation(queryObject, coll, options);
    } catch (err) {
      return callback(err, null);
    }

    if (options.hint) {
      // TODO: once this method becomes a CommandOperationV2 we will have the server
      // in place to check.
      const unacknowledgedWrite = options.writeConcern && options.writeConcern.w === 0;
      if (unacknowledgedWrite || maxWireVersion(coll.s.topology) < 8) {
        callback(
          new MongoError('The current topology does not support a hint on findAndModify commands')
        );

        return;
      }

      queryObject.hint = options.hint;
    }

    // Execute the command
    executeCommand(coll.s.db, queryObject, options, (err, result) => {
      if (err) return handleCallback(callback, err, null);

      return handleCallback(callback, null, result);
    });
  }
}

module.exports = FindAndModifyOperation;
|
||||
37
node_modules/mongodb/lib/operations/find_one.js
generated
vendored
Normal file
37
node_modules/mongodb/lib/operations/find_one.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
'use strict';
|
||||
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const toError = require('../utils').toError;
|
||||
|
||||
/**
 * Fetches at most one document matching `query` from a collection by issuing
 * a single-batch find and returning its first result (or null).
 */
class FindOneOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection to query.
   * @param {object} query Selection criteria.
   * @param {object} [options] Optional find settings.
   */
  constructor(collection, query, options) {
    super(options);

    this.collection = collection;
    this.query = query;
  }

  /**
   * @param {function} callback Invoked with `(err, documentOrNull)`.
   */
  execute(callback) {
    try {
      // `limit(-1)` plus `batchSize(1)` requests a single-document,
      // auto-closing cursor from the server.
      const singleDocCursor = this.collection
        .find(this.query, this.options)
        .limit(-1)
        .batchSize(1);

      singleDocCursor.next((err, item) => {
        if (err != null) return handleCallback(callback, toError(err), null);
        handleCallback(callback, null, item);
      });
    } catch (e) {
      callback(e);
    }
  }
}

module.exports = FindOneOperation;
|
||||
21
node_modules/mongodb/lib/operations/find_one_and_delete.js
generated
vendored
Normal file
21
node_modules/mongodb/lib/operations/find_one_and_delete.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
'use strict';
|
||||
|
||||
const FindAndModifyOperation = require('./find_and_modify');
|
||||
|
||||
/**
 * Deletes a single document matching `filter` via the `findAndModify`
 * command, yielding the deleted document.
 */
class FindOneAndDeleteOperation extends FindAndModifyOperation {
  /**
   * @param {Collection} collection The collection to operate on.
   * @param {object} filter Document selection criteria; must be a non-null object.
   * @param {object} [options] Optional settings (projection, sort, ...).
   * @throws {TypeError} If `filter` is not an object.
   */
  constructor(collection, filter, options) {
    // Basic validation — run it before touching `options` so an invalid (or
    // omitted) options argument cannot mask the real error: previously
    // `options.projection` was read first and threw an opaque TypeError when
    // `options` was undefined.
    if (filter == null || typeof filter !== 'object') {
      throw new TypeError('Filter parameter must be an object');
    }

    // Final options (copy so the caller's object is never mutated; an omitted
    // options argument now simply yields defaults)
    const finalOptions = Object.assign({}, options);
    finalOptions.fields = finalOptions.projection;
    finalOptions.remove = true;

    super(collection, filter, finalOptions.sort, null, finalOptions);
  }
}

module.exports = FindOneAndDeleteOperation;
|
||||
31
node_modules/mongodb/lib/operations/find_one_and_replace.js
generated
vendored
Normal file
31
node_modules/mongodb/lib/operations/find_one_and_replace.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
const FindAndModifyOperation = require('./find_and_modify');
|
||||
const hasAtomicOperators = require('../utils').hasAtomicOperators;
|
||||
|
||||
/**
 * Replaces a single document matching `filter` with `replacement` via the
 * `findAndModify` command.
 */
class FindOneAndReplaceOperation extends FindAndModifyOperation {
  /**
   * @param {Collection} collection The collection to operate on.
   * @param {object} filter Document selection criteria; must be a non-null object.
   * @param {object} replacement The replacement document; must not contain atomic operators.
   * @param {object} [options] Optional settings (projection, sort, upsert, returnOriginal, ...).
   * @throws {TypeError} If `filter` or `replacement` is invalid.
   */
  constructor(collection, filter, replacement, options) {
    // Validate inputs before touching `options` so an invalid (or omitted)
    // options argument cannot mask the real error: previously
    // `options.projection` was read first and threw an opaque TypeError when
    // `options` was undefined.
    if (filter == null || typeof filter !== 'object') {
      throw new TypeError('Filter parameter must be an object');
    }

    if (replacement == null || typeof replacement !== 'object') {
      throw new TypeError('Replacement parameter must be an object');
    }

    if (hasAtomicOperators(replacement)) {
      throw new TypeError('Replacement document must not contain atomic operators');
    }

    // Final options (copy; an omitted options argument yields defaults)
    const finalOptions = Object.assign({}, options);
    finalOptions.fields = finalOptions.projection;
    finalOptions.update = true;
    // `new: true` asks the server for the post-replacement document.
    finalOptions.new = finalOptions.returnOriginal !== void 0 ? !finalOptions.returnOriginal : false;
    finalOptions.upsert = finalOptions.upsert !== void 0 ? !!finalOptions.upsert : false;

    super(collection, filter, finalOptions.sort, replacement, finalOptions);
  }
}

module.exports = FindOneAndReplaceOperation;
|
||||
32
node_modules/mongodb/lib/operations/find_one_and_update.js
generated
vendored
Normal file
32
node_modules/mongodb/lib/operations/find_one_and_update.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
'use strict';
|
||||
|
||||
const FindAndModifyOperation = require('./find_and_modify');
|
||||
const hasAtomicOperators = require('../utils').hasAtomicOperators;
|
||||
|
||||
/**
 * Updates a single document matching `filter` with the atomic `update`
 * document via the `findAndModify` command.
 */
class FindOneAndUpdateOperation extends FindAndModifyOperation {
  /**
   * @param {Collection} collection The collection to operate on.
   * @param {object} filter Document selection criteria; must be a non-null object.
   * @param {object} update The update document; must contain atomic operators.
   * @param {object} [options] Optional settings (projection, sort, upsert, returnOriginal, ...).
   * @throws {TypeError} If `filter` or `update` is invalid.
   */
  constructor(collection, filter, update, options) {
    // Validate inputs before touching `options` so an invalid (or omitted)
    // options argument cannot mask the real error: previously
    // `options.projection` was read first and threw an opaque TypeError when
    // `options` was undefined.
    if (filter == null || typeof filter !== 'object') {
      throw new TypeError('Filter parameter must be an object');
    }

    if (update == null || typeof update !== 'object') {
      throw new TypeError('Update parameter must be an object');
    }

    if (!hasAtomicOperators(update)) {
      throw new TypeError('Update document requires atomic operators');
    }

    // Final options (copy; an omitted options argument yields defaults)
    const finalOptions = Object.assign({}, options);
    finalOptions.fields = finalOptions.projection;
    finalOptions.update = true;
    // `new: true` asks the server for the post-update document.
    finalOptions.new =
      typeof finalOptions.returnOriginal === 'boolean' ? !finalOptions.returnOriginal : false;
    finalOptions.upsert = typeof finalOptions.upsert === 'boolean' ? finalOptions.upsert : false;

    super(collection, filter, finalOptions.sort, update, finalOptions);
  }
}

module.exports = FindOneAndUpdateOperation;
|
||||
79
node_modules/mongodb/lib/operations/geo_haystack_search.js
generated
vendored
Normal file
79
node_modules/mongodb/lib/operations/geo_haystack_search.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const decorateCommand = require('../utils').decorateCommand;
|
||||
const decorateWithReadConcern = require('../utils').decorateWithReadConcern;
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const toError = require('../utils').toError;
|
||||
|
||||
/**
|
||||
* Execute a geo search using a geo haystack index on a collection.
|
||||
*
|
||||
* @class
|
||||
* @property {Collection} a Collection instance.
|
||||
* @property {number} x Point to search on the x axis, ensure the indexes are ordered in the same order.
|
||||
* @property {number} y Point to search on the y axis, ensure the indexes are ordered in the same order.
|
||||
* @property {object} [options] Optional settings. See Collection.prototype.geoHaystackSearch for a list of options.
|
||||
*/
|
||||
class GeoHaystackSearchOperation extends OperationBase {
  /**
   * Construct a GeoHaystackSearch operation.
   *
   * @param {Collection} collection A Collection instance.
   * @param {number} x Point to search on the x axis, ensure the indexes are ordered in the same order.
   * @param {number} y Point to search on the y axis, ensure the indexes are ordered in the same order.
   * @param {object} [options] Optional settings. See Collection.prototype.geoHaystackSearch for a list of options.
   */
  constructor(collection, x, y, options) {
    super(options);

    this.collection = collection;
    this.x = x;
    this.y = y;
  }

  /**
   * Execute the operation.
   *
   * @param {Collection~resultCallback} [callback] The command result callback
   */
  execute(callback) {
    const coll = this.collection;
    const x = this.x;
    const y = this.y;
    let options = this.options;

    // Build command object
    let commandObject = {
      geoSearch: coll.collectionName,
      near: [x, y]
    };

    // Remove read preference from hash if it exists
    commandObject = decorateCommand(commandObject, options, ['readPreference', 'session']);

    options = Object.assign({}, options);
    // Ensure we have the right read preference inheritance
    options.readPreference = ReadPreference.resolve(coll, options);

    // Do we have a readConcern specified
    decorateWithReadConcern(commandObject, coll, options);

    // Execute the command
    executeCommand(coll.s.db, commandObject, options, (err, res) => {
      if (err) return handleCallback(callback, err);
      // Bug fix: this branch previously fell through, invoking the callback a
      // second time below with the (error-carrying) result document.
      if (res.err || res.errmsg) return handleCallback(callback, toError(res));
      // should we only be returning res.results here? Not sure if the user
      // should see the other return information
      handleCallback(callback, null, res);
    });
  }
}

defineAspects(GeoHaystackSearchOperation, Aspect.READ_OPERATION);

module.exports = GeoHaystackSearchOperation;
|
||||
39
node_modules/mongodb/lib/operations/index_exists.js
generated
vendored
Normal file
39
node_modules/mongodb/lib/operations/index_exists.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const indexInformationDb = require('./db_ops').indexInformation;
|
||||
|
||||
/**
 * Checks whether a single index name (or every name in an array of names)
 * exists on a collection, yielding a boolean.
 */
class IndexExistsOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection whose indexes are inspected.
   * @param {string|string[]} indexes One index name or an array of names.
   * @param {object} [options] Optional settings forwarded to indexInformation.
   */
  constructor(collection, indexes, options) {
    super(options);

    this.collection = collection;
    this.indexes = indexes;
  }

  /**
   * @param {function} callback Invoked with `(err, exists)`.
   */
  execute(callback) {
    const coll = this.collection;
    const indexes = this.indexes;

    indexInformationDb(coll.s.db, coll.collectionName, this.options, (err, indexInformation) => {
      // If we have an error return
      if (err != null) return handleCallback(callback, err, null);

      // Single name: report whether that index is present.
      if (!Array.isArray(indexes)) {
        return handleCallback(callback, null, indexInformation[indexes] != null);
      }

      // Array of names: every one must be present.
      const allPresent = indexes.every(name => indexInformation[name] != null);
      return handleCallback(callback, null, allPresent);
    });
  }
}

module.exports = IndexExistsOperation;
|
||||
23
node_modules/mongodb/lib/operations/index_information.js
generated
vendored
Normal file
23
node_modules/mongodb/lib/operations/index_information.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const indexInformation = require('./common_functions').indexInformation;
|
||||
|
||||
/**
 * Thin operation wrapper that fetches index information for a collection
 * name on a given database.
 */
class IndexInformationOperation extends OperationBase {
  /**
   * @param {Db} db The database to query.
   * @param {string} name The collection name whose indexes are listed.
   * @param {object} [options] Optional settings forwarded to indexInformation.
   */
  constructor(db, name, options) {
    super(options);

    this.db = db;
    this.name = name;
  }

  /**
   * @param {function} callback The command result callback.
   */
  execute(callback) {
    // Delegate directly to the shared helper.
    indexInformation(this.db, this.name, this.options, callback);
  }
}

module.exports = IndexInformationOperation;
|
||||
22
node_modules/mongodb/lib/operations/indexes.js
generated
vendored
Normal file
22
node_modules/mongodb/lib/operations/indexes.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const indexInformation = require('./common_functions').indexInformation;
|
||||
|
||||
/**
 * Lists the full index definitions of a collection.
 */
class IndexesOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection whose indexes are listed.
   * @param {object} [options] Optional settings forwarded to indexInformation.
   */
  constructor(collection, options) {
    super(options);

    this.collection = collection;
  }

  /**
   * @param {function} callback The command result callback.
   */
  execute(callback) {
    const coll = this.collection;
    // Request full index documents by default; caller options may override.
    const finalOptions = Object.assign({}, { full: true }, this.options);

    indexInformation(coll.s.db, coll.collectionName, finalOptions, callback);
  }
}

module.exports = IndexesOperation;
|
||||
63
node_modules/mongodb/lib/operations/insert_many.js
generated
vendored
Normal file
63
node_modules/mongodb/lib/operations/insert_many.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const BulkWriteOperation = require('./bulk_write');
|
||||
const MongoError = require('../core').MongoError;
|
||||
const prepareDocs = require('./common_functions').prepareDocs;
|
||||
|
||||
/**
 * Inserts an array of documents by routing them through a single bulk write
 * and reshaping the bulk result into the insertMany result format.
 */
class InsertManyOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection to insert into.
   * @param {object[]} docs The documents to insert.
   * @param {object} [options] Optional insert settings.
   */
  constructor(collection, docs, options) {
    super(options);

    this.collection = collection;
    this.docs = docs;
  }

  /**
   * @param {function} callback Invoked with `(err, insertManyResult)`.
   */
  execute(callback) {
    const coll = this.collection;
    const options = this.options;

    if (!Array.isArray(this.docs)) {
      return callback(
        MongoError.create({ message: 'docs parameter must be an array of documents', driver: true })
      );
    }

    // If keep going set unordered
    options['serializeFunctions'] = options['serializeFunctions'] || coll.s.serializeFunctions;

    const preparedDocs = prepareDocs(coll, this.docs, options);

    // Generate the bulk write operations
    const operations = [{ insertMany: preparedDocs }];
    const bulkWriteOperation = new BulkWriteOperation(coll, operations, options);

    bulkWriteOperation.execute((err, result) => {
      if (err) return callback(err, null);
      callback(null, mapInsertManyResults(preparedDocs, result));
    });
  }
}
|
||||
|
||||
/**
 * Reshapes a bulk-write result from an insertMany bulk operation into the
 * legacy insertMany result shape (top-level counts plus a `result` envelope).
 *
 * @param {object[]} docs The documents that were inserted.
 * @param {object} r The bulk write result to translate.
 * @returns {object} The crud-spec style insertMany result.
 */
function mapInsertManyResults(docs, r) {
  const finalResult = {
    result: { ok: 1, n: r.insertedCount },
    ops: docs,
    insertedCount: r.insertedCount,
    insertedIds: r.insertedIds
  };

  const lastOp = r.getLastOp();
  if (lastOp) {
    // Preserve the opTime of the last write when the server reported one.
    finalResult.result.opTime = lastOp;
  }

  return finalResult;
}
|
||||
|
||||
// Public entry point backing `Collection.insertMany`.
module.exports = InsertManyOperation;
|
||||
39
node_modules/mongodb/lib/operations/insert_one.js
generated
vendored
Normal file
39
node_modules/mongodb/lib/operations/insert_one.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
const MongoError = require('../core').MongoError;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const insertDocuments = require('./common_functions').insertDocuments;
|
||||
|
||||
/**
 * Inserts a single document, decorating the raw insert result with the
 * crud-spec `insertedCount` / `insertedId` fields.
 */
class InsertOneOperation extends OperationBase {
  /**
   * @param {Collection} collection The collection to insert into.
   * @param {object} doc The document to insert (must not be an array).
   * @param {object} [options] Optional insert settings.
   */
  constructor(collection, doc, options) {
    super(options);

    this.collection = collection;
    this.doc = doc;
  }

  /**
   * @param {function} callback Invoked with `(err, insertOneResult)`.
   */
  execute(callback) {
    const coll = this.collection;
    const doc = this.doc;

    if (Array.isArray(doc)) {
      return callback(
        MongoError.create({ message: 'doc parameter must be an object', driver: true })
      );
    }

    insertDocuments(coll, [doc], this.options, (err, r) => {
      if (callback == null) return;
      if (err) return callback(err);
      // Workaround for pre 2.6 servers
      if (r == null) return callback(null, { result: { ok: 1 } });
      // Add values to top level to ensure crud spec compatibility
      r.insertedCount = r.result.n;
      r.insertedId = doc._id;
      callback(null, r);
    });
  }
}

module.exports = InsertOneOperation;
|
||||
19
node_modules/mongodb/lib/operations/is_capped.js
generated
vendored
Normal file
19
node_modules/mongodb/lib/operations/is_capped.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
const OptionsOperation = require('./options_operation');
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
|
||||
/**
 * Determines whether a collection is capped by reading its stored options
 * (via OptionsOperation) and coercing the `capped` flag to a boolean.
 */
class IsCappedOperation extends OptionsOperation {
  /**
   * @param {Collection} collection The collection to inspect.
   * @param {object} [options] Optional settings forwarded to OptionsOperation.
   */
  constructor(collection, options) {
    super(collection, options);
  }

  /**
   * @param {function} callback Invoked with `(err, isCapped)`.
   */
  execute(callback) {
    super.execute((err, document) => {
      if (err) return handleCallback(callback, err);
      // A missing options document or missing flag means "not capped".
      const isCapped = !!(document && document.capped);
      handleCallback(callback, null, isCapped);
    });
  }
}

module.exports = IsCappedOperation;
|
||||
106
node_modules/mongodb/lib/operations/list_collections.js
generated
vendored
Normal file
106
node_modules/mongodb/lib/operations/list_collections.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
const CONSTANTS = require('../constants');
|
||||
|
||||
const LIST_COLLECTIONS_WIRE_VERSION = 3;
|
||||
|
||||
/**
 * Builds the transforms applied to legacy `system.namespaces` documents so
 * they look like modern `listCollections` results (namespace prefix removed).
 *
 * @param {string} databaseName The database whose `<db>.` prefix is stripped.
 * @returns {{doc: function(object): object}} The per-document transform.
 */
function listCollectionsTransforms(databaseName) {
  const matching = `${databaseName}.`;

  return {
    doc: doc => {
      // Guard before dereferencing: legacy documents may lack a `name` field,
      // and the previous code called `doc.name.indexOf(...)` before its own
      // `doc.name &&` check, throwing a TypeError on such documents.
      if (doc.name && doc.name.indexOf(matching) === 0) {
        // Remove database name if available
        doc.name = doc.name.substr(matching.length);
      }

      return doc;
    }
  };
}
|
||||
|
||||
/**
 * Implements `listCollections`, falling back to a direct query of
 * `system.namespaces` on servers older than wire version 3.
 */
class ListCollectionsOperation extends CommandOperationV2 {
  /**
   * @param {Db} db The database whose collections are listed.
   * @param {object} filter Filter applied to the collection documents.
   * @param {object} [options] Optional settings (nameOnly, batchSize, ...).
   */
  constructor(db, filter, options) {
    super(db, options, { fullResponse: true });

    this.db = db;
    this.filter = filter;
    this.nameOnly = !!this.options.nameOnly;

    if (typeof this.options.batchSize === 'number') {
      this.batchSize = this.options.batchSize;
    }
  }

  /**
   * @param {Server} server The selected server to run against.
   * @param {function} callback The command result callback.
   */
  execute(server, callback) {
    // Legacy path: pre-3.0 servers have no listCollections command, so query
    // the `system.namespaces` collection directly.
    if (maxWireVersion(server) < LIST_COLLECTIONS_WIRE_VERSION) {
      let filter = this.filter;
      const databaseName = this.db.s.namespace.db;

      // If we have legacy mode and have not provided a full db name filter it
      if (
        typeof filter.name === 'string' &&
        !new RegExp('^' + databaseName + '\\.').test(filter.name)
      ) {
        // Copy before rewriting so the caller's filter object is untouched.
        filter = Object.assign({}, filter);
        filter.name = this.db.s.namespace.withCollection(filter.name).toString();
      }

      // No filter, filter by current database
      // NOTE(review): this branch looks unreachable/broken — a null `filter`
      // would already have thrown at the `typeof filter.name` access above,
      // and assigning `filter.name` on null would throw here; confirm the
      // intended default-filter behavior upstream.
      if (filter == null) {
        filter.name = `/${databaseName}/`;
      }

      // Rewrite the filter to use $and to filter out indexes
      if (filter.name) {
        filter = { $and: [{ name: filter.name }, { name: /^((?!\$).)*$/ }] };
      } else {
        filter = { name: /^((?!\$).)*$/ };
      }

      // Strip the `<db>.` prefix from returned names to mimic listCollections.
      const transforms = listCollectionsTransforms(databaseName);
      server.query(
        `${databaseName}.${CONSTANTS.SYSTEM_NAMESPACE_COLLECTION}`,
        { query: filter },
        { batchSize: this.batchSize || 1000 },
        {},
        (err, result) => {
          if (
            result &&
            result.message &&
            result.message.documents &&
            Array.isArray(result.message.documents)
          ) {
            result.message.documents = result.message.documents.map(transforms.doc);
          }

          callback(err, result);
        }
      );

      return;
    }

    // Modern path: run the real listCollections command with cursor support.
    const command = {
      listCollections: 1,
      filter: this.filter,
      cursor: this.batchSize ? { batchSize: this.batchSize } : {},
      nameOnly: this.nameOnly
    };

    return super.executeCommand(server, command, callback);
  }
}
|
||||
|
||||
// listCollections is a retryable read executed against a selected server.
defineAspects(ListCollectionsOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = ListCollectionsOperation;
|
||||
38
node_modules/mongodb/lib/operations/list_databases.js
generated
vendored
Normal file
38
node_modules/mongodb/lib/operations/list_databases.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const MongoDBNamespace = require('../utils').MongoDBNamespace;
|
||||
|
||||
/**
 * Runs the `listDatabases` admin command, forwarding the `nameOnly`,
 * `filter` and `authorizedDatabases` options to the server.
 */
class ListDatabasesOperation extends CommandOperationV2 {
  constructor(db, options) {
    super(db, options);
    // listDatabases must always be issued against the admin database.
    this.ns = new MongoDBNamespace('admin', '$cmd');
  }

  /**
   * Execute the operation against the selected server.
   *
   * @param {Server} server The server to run the command against
   * @param {function} callback Invoked with (err, result)
   */
  execute(server, callback) {
    const cmd = { listDatabases: 1 };

    // BUG FIX: this previously read `cmd.nameOnly` (always undefined at
    // this point), so the command was sent with `nameOnly: NaN`. The
    // user-supplied option is what must be forwarded, coerced to the
    // numeric form expected on the wire.
    if (this.options.nameOnly) {
      cmd.nameOnly = Number(this.options.nameOnly);
    }

    if (this.options.filter) {
      cmd.filter = this.options.filter;
    }

    if (typeof this.options.authorizedDatabases === 'boolean') {
      cmd.authorizedDatabases = this.options.authorizedDatabases;
    }

    super.executeCommand(server, cmd, callback);
  }
}

defineAspects(ListDatabasesOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = ListDatabasesOperation;
|
||||
42
node_modules/mongodb/lib/operations/list_indexes.js
generated
vendored
Normal file
42
node_modules/mongodb/lib/operations/list_indexes.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const maxWireVersion = require('../core/utils').maxWireVersion;
|
||||
|
||||
// Minimum wire protocol version that supports the `listIndexes` command.
const LIST_INDEXES_WIRE_VERSION = 3;

/**
 * Retrieves the index definitions of a collection: via the
 * `listIndexes` command on modern servers, falling back to querying the
 * `system.indexes` collection on servers older than wire version 3.
 */
class ListIndexesOperation extends CommandOperationV2 {
  constructor(collection, options) {
    super(collection, options, { fullResponse: true });

    this.collectionNamespace = collection.s.namespace;
  }

  /**
   * Execute the operation against the selected server.
   *
   * @param {Server} server The server to run the command against
   * @param {function} callback Invoked with (err, result)
   */
  execute(server, callback) {
    if (maxWireVersion(server) < LIST_INDEXES_WIRE_VERSION) {
      // Legacy path: read index metadata straight out of the
      // `system.indexes` collection, filtered to this namespace.
      const systemIndexesNS = this.collectionNamespace.withCollection('system.indexes').toString();
      const collectionNS = this.collectionNamespace.toString();

      server.query(systemIndexesNS, { query: { ns: collectionNS } }, {}, this.options, callback);
      return;
    }

    const cursor = this.options.batchSize ? { batchSize: this.options.batchSize } : {};
    super.executeCommand(
      server,
      { listIndexes: this.collectionNamespace.collection, cursor },
      callback
    );
  }
}

defineAspects(ListIndexesOperation, [
  Aspect.READ_OPERATION,
  Aspect.RETRYABLE,
  Aspect.EXECUTE_WITH_SELECTION
]);

module.exports = ListIndexesOperation;
|
||||
190
node_modules/mongodb/lib/operations/map_reduce.js
generated
vendored
Normal file
190
node_modules/mongodb/lib/operations/map_reduce.js
generated
vendored
Normal file
@@ -0,0 +1,190 @@
|
||||
'use strict';
|
||||
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const Code = require('../core').BSON.Code;
|
||||
const decorateWithCollation = require('../utils').decorateWithCollation;
|
||||
const decorateWithReadConcern = require('../utils').decorateWithReadConcern;
|
||||
const executeCommand = require('./db_ops').executeCommand;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const isObject = require('../utils').isObject;
|
||||
const loadDb = require('../dynamic_loaders').loadDb;
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const ReadPreference = require('../core').ReadPreference;
|
||||
const toError = require('../utils').toError;
|
||||
|
||||
// Options consumed by the driver itself; these must not be copied onto
// the mapReduce command document sent to the server.
const exclusionList = [
  'readPreference',
  'session',
  'bypassDocumentValidation',
  'w',
  'wtimeout',
  'j',
  'writeConcern'
];

/**
 * Run Map Reduce across a collection. Be aware that the inline option for out will return an array of results not a collection.
 *
 * @class
 * @property {Collection} collection A Collection instance.
 * @property {(function|string)} map The mapping function.
 * @property {(function|string)} reduce The reduce function.
 * @property {object} [options] Optional settings. See Collection.prototype.mapReduce for a list of options.
 */
class MapReduceOperation extends OperationBase {
  /**
   * Constructs a MapReduce operation.
   *
   * @param {Collection} collection A Collection instance.
   * @param {(function|string)} map The mapping function.
   * @param {(function|string)} reduce The reduce function.
   * @param {object} [options] Optional settings. See Collection.prototype.mapReduce for a list of options.
   */
  constructor(collection, map, reduce, options) {
    super(options);

    this.collection = collection;
    this.map = map;
    this.reduce = reduce;
  }

  /**
   * Execute the operation.
   *
   * Builds the `mapReduce` command from the stored map/reduce functions
   * and options, runs it, and — depending on the `out` option — yields
   * either the inline results array or a Collection wrapping the output
   * collection (plus a stats object when `verbose` is set).
   *
   * @param {Collection~resultCallback} [callback] The command result callback
   */
  execute(callback) {
    const coll = this.collection;
    const map = this.map;
    const reduce = this.reduce;
    let options = this.options;

    const mapCommandHash = {
      mapReduce: coll.collectionName,
      map: map,
      reduce: reduce
    };

    // Add any other options passed in; `scope` values need their
    // function members converted to BSON Code instances first.
    for (let n in options) {
      if ('scope' === n) {
        mapCommandHash[n] = processScope(options[n]);
      } else {
        // Only include if not in exclusion list
        if (exclusionList.indexOf(n) === -1) {
          mapCommandHash[n] = options[n];
        }
      }
    }

    // Work on a copy so the caller's options object is not mutated below.
    options = Object.assign({}, options);

    // Ensure we have the right read preference inheritance
    options.readPreference = ReadPreference.resolve(coll, options);

    // If we have a read preference and inline is not set as output fail hard
    if (
      options.readPreference !== false &&
      options.readPreference !== 'primary' &&
      options['out'] &&
      options['out'].inline !== 1 &&
      options['out'] !== 'inline'
    ) {
      // Force readPreference to primary
      options.readPreference = 'primary';
      // Decorate command with writeConcern if supported
      applyWriteConcern(mapCommandHash, { db: coll.s.db, collection: coll }, options);
    } else {
      decorateWithReadConcern(mapCommandHash, coll, options);
    }

    // Is bypassDocumentValidation specified
    if (options.bypassDocumentValidation === true) {
      mapCommandHash.bypassDocumentValidation = options.bypassDocumentValidation;
    }

    // Have we specified collation (throws if collation is unsupported here)
    try {
      decorateWithCollation(mapCommandHash, coll, options);
    } catch (err) {
      return callback(err, null);
    }

    // Execute command
    executeCommand(coll.s.db, mapCommandHash, options, (err, result) => {
      if (err) return handleCallback(callback, err);
      // Check if we have an error
      if (1 !== result.ok || result.err || result.errmsg) {
        return handleCallback(callback, toError(result));
      }

      // Create statistics value
      const stats = {};
      if (result.timeMillis) stats['processtime'] = result.timeMillis;
      if (result.counts) stats['counts'] = result.counts;
      if (result.timing) stats['timing'] = result.timing;

      // invoked with inline? (server returned the results directly)
      if (result.results) {
        // If we wish for no verbosity
        if (options['verbose'] == null || !options['verbose']) {
          return handleCallback(callback, null, result.results);
        }

        return handleCallback(callback, null, { results: result.results, stats: stats });
      }

      // The returned collection
      let collection = null;

      // If we have an object it's a different db
      if (result.result != null && typeof result.result === 'object') {
        const doc = result.result;
        // Return a collection from another db
        let Db = loadDb();
        collection = new Db(doc.db, coll.s.db.s.topology, coll.s.db.s.options).collection(
          doc.collection
        );
      } else {
        // Create a collection object that wraps the result collection
        collection = coll.s.db.collection(result.result);
      }

      // If we wish for no verbosity
      if (options['verbose'] == null || !options['verbose']) {
        return handleCallback(callback, err, collection);
      }

      // Return stats as third set of values
      handleCallback(callback, err, { collection: collection, stats: stats });
    });
  }
}
|
||||
|
||||
/**
 * Functions that are passed as scope args must
 * be converted to Code instances.
 * @ignore
 */
function processScope(scope) {
  // Non-objects (and ObjectIDs, which are objects but must not be
  // recursed into) pass through unchanged.
  if (!isObject(scope) || scope._bsontype === 'ObjectID') {
    return scope;
  }

  const converted = {};
  // Iterate keys in reverse, matching the original insertion order of
  // the produced object.
  for (const key of Object.keys(scope).reverse()) {
    const value = scope[key];
    converted[key] = 'function' === typeof value ? new Code(String(value)) : processScope(value);
  }

  return converted;
}

module.exports = MapReduceOperation;
|
||||
66
node_modules/mongodb/lib/operations/operation.js
generated
vendored
Normal file
66
node_modules/mongodb/lib/operations/operation.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
'use strict';
|
||||
|
||||
// Marker symbols describing how an operation may be executed.
const Aspect = {
  READ_OPERATION: Symbol('READ_OPERATION'),
  WRITE_OPERATION: Symbol('WRITE_OPERATION'),
  RETRYABLE: Symbol('RETRYABLE'),
  EXECUTE_WITH_SELECTION: Symbol('EXECUTE_WITH_SELECTION'),
  NO_INHERIT_OPTIONS: Symbol('NO_INHERIT_OPTIONS')
};

/**
 * Parent class for any operation. Responsible for setting this.options,
 * getting/setting the session, and answering `hasAspect`, which checks
 * whether an operation class was registered with a specific aspect.
 */
class OperationBase {
  constructor(options) {
    // Copy so later session assignment never mutates the caller's object.
    this.options = Object.assign({}, options);
  }

  hasAspect(aspect) {
    const registered = this.constructor.aspects;
    return registered == null ? false : registered.has(aspect);
  }

  set session(session) {
    Object.assign(this.options, { session });
  }

  get session() {
    return this.options.session;
  }

  clearSession() {
    delete this.options.session;
  }

  get canRetryRead() {
    return true;
  }

  execute() {
    throw new TypeError('`execute` must be implemented for OperationBase subclasses');
  }
}

/**
 * Attach a frozen Set of aspects to an operation class.
 *
 * @param {Function} operation The operation class to annotate
 * @param {(Symbol|Symbol[]|Set)} aspects One aspect, or a collection of them
 * @returns {Set} The Set stored on the class
 */
function defineAspects(operation, aspects) {
  const aspectSet =
    Array.isArray(aspects) || aspects instanceof Set ? new Set(aspects) : new Set([aspects]);
  Object.defineProperty(operation, 'aspects', {
    value: aspectSet,
    writable: false
  });
  return aspectSet;
}
|
||||
|
||||
// Public surface of the operations framework: the aspect symbols, the
// aspect-registration helper, and the abstract operation base class.
module.exports = {
  Aspect,
  defineAspects,
  OperationBase
};
|
||||
32
node_modules/mongodb/lib/operations/options_operation.js
generated
vendored
Normal file
32
node_modules/mongodb/lib/operations/options_operation.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const MongoError = require('../core').MongoError;
|
||||
|
||||
/**
 * Fetches the stored creation options of a collection by looking it up
 * through `listCollections` on its parent database.
 */
class OptionsOperation extends OperationBase {
  constructor(collection, options) {
    super(options);

    this.collection = collection;
  }

  /**
   * @param {function} callback Invoked with (err, options) — `options`
   *     is the collection's options document, or null when absent.
   */
  execute(callback) {
    const coll = this.collection;
    const listOptions = this.options;

    coll.s.db
      .listCollections({ name: coll.collectionName }, listOptions)
      .toArray((err, collections) => {
        if (err) return handleCallback(callback, err);

        // An empty listing means the collection does not exist.
        if (collections.length === 0) {
          return handleCallback(
            callback,
            MongoError.create({ message: `collection ${coll.namespace} not found`, driver: true })
          );
        }

        handleCallback(callback, err, collections[0].options || null);
      });
  }
}

module.exports = OptionsOperation;
|
||||
31
node_modules/mongodb/lib/operations/profiling_level.js
generated
vendored
Normal file
31
node_modules/mongodb/lib/operations/profiling_level.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperation = require('./command');
|
||||
|
||||
/**
 * Reads the database's current profiling level with `{ profile: -1 }`
 * and maps the numeric `was` value onto 'off' / 'slow_only' / 'all'.
 */
class ProfilingLevelOperation extends CommandOperation {
  constructor(db, command, options) {
    super(db, options);
  }

  _buildCommand() {
    // A profile value of -1 asks the server to report, not change, the level.
    return { profile: -1 };
  }

  execute(callback) {
    super.execute((err, doc) => {
      if (err != null || doc.ok !== 1) {
        return err != null
          ? callback(err, null)
          : callback(new Error('Error with profile command'), null);
      }

      switch (doc.was) {
        case 0:
          return callback(null, 'off');
        case 1:
          return callback(null, 'slow_only');
        case 2:
          return callback(null, 'all');
        default:
          return callback(new Error('Error: illegal profiling level value ' + doc.was), null);
      }
    });
  }
}

module.exports = ProfilingLevelOperation;
|
||||
33
node_modules/mongodb/lib/operations/re_index.js
generated
vendored
Normal file
33
node_modules/mongodb/lib/operations/re_index.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const serverType = require('../core/sdam/common').serverType;
|
||||
const ServerType = require('../core/sdam/common').ServerType;
|
||||
const MongoError = require('../core').MongoError;
|
||||
|
||||
/**
 * Rebuilds all indexes on a collection via the `reIndex` command.
 * Rejected up front unless the selected server is a standalone.
 */
class ReIndexOperation extends CommandOperationV2 {
  constructor(collection, options) {
    super(collection, options);
    this.collectionName = collection.collectionName;
  }

  execute(server, callback) {
    if (serverType(server) !== ServerType.Standalone) {
      return callback(new MongoError(`reIndex can only be executed on standalone servers.`));
    }

    super.executeCommand(server, { reIndex: this.collectionName }, (err, result) => {
      if (err) return callback(err);
      // Collapse the raw response into a simple success boolean.
      callback(null, !!result.ok);
    });
  }
}

defineAspects(ReIndexOperation, [Aspect.EXECUTE_WITH_SELECTION]);

module.exports = ReIndexOperation;
|
||||
52
node_modules/mongodb/lib/operations/remove_user.js
generated
vendored
Normal file
52
node_modules/mongodb/lib/operations/remove_user.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const CommandOperation = require('./command');
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const WriteConcern = require('../write_concern');
|
||||
|
||||
/**
 * Drops a database user via the `dropUser` command.
 */
class RemoveUserOperation extends CommandOperation {
  constructor(db, username, options) {
    // Forward only the command-level options the server cares about.
    const commandOptions = {};

    const writeConcern = WriteConcern.fromOptions(options);
    if (writeConcern != null) {
      commandOptions.writeConcern = writeConcern;
    }

    if (options.dbName) {
      commandOptions.dbName = options.dbName;
    }

    // Add maxTimeMS to options if set
    if (typeof options.maxTimeMS === 'number') {
      commandOptions.maxTimeMS = options.maxTimeMS;
    }

    super(db, commandOptions);

    this.username = username;
  }

  _buildCommand() {
    return { dropUser: this.username };
  }

  execute(callback) {
    super.execute((err, result) => {
      if (err) return handleCallback(callback, err, null);
      // Collapse the raw command response into a boolean.
      handleCallback(callback, err, result.ok ? true : false);
    });
  }
}

defineAspects(RemoveUserOperation, Aspect.WRITE_OPERATION);

module.exports = RemoveUserOperation;
|
||||
61
node_modules/mongodb/lib/operations/rename.js
generated
vendored
Normal file
61
node_modules/mongodb/lib/operations/rename.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const applyWriteConcern = require('../utils').applyWriteConcern;
|
||||
const checkCollectionName = require('../utils').checkCollectionName;
|
||||
const executeDbAdminCommand = require('./db_ops').executeDbAdminCommand;
|
||||
const handleCallback = require('../utils').handleCallback;
|
||||
const loadCollection = require('../dynamic_loaders').loadCollection;
|
||||
const toError = require('../utils').toError;
|
||||
|
||||
/**
 * Renames a collection and yields a new Collection instance bound to
 * the new name.
 */
class RenameOperation extends OperationBase {
  constructor(collection, newName, options) {
    super(options);

    this.collection = collection;
    this.newName = newName;
  }

  /**
   * @param {function} callback Invoked with (err, collection), where
   *     `collection` wraps the renamed namespace.
   */
  execute(callback) {
    const coll = this.collection;
    const newName = this.newName;
    const options = this.options;

    let Collection = loadCollection();
    // Check the collection name
    checkCollectionName(newName);
    // Build the command: renameCollection takes fully-qualified
    // "<db>.<collection>" namespaces for both source and target.
    const renameCollection = coll.namespace;
    const toCollection = coll.s.namespace.withCollection(newName).toString();
    const dropTarget = typeof options.dropTarget === 'boolean' ? options.dropTarget : false;
    const cmd = { renameCollection: renameCollection, to: toCollection, dropTarget: dropTarget };

    // Decorate command with writeConcern if supported
    applyWriteConcern(cmd, { db: coll.s.db, collection: coll }, options);

    // renameCollection must be run against the admin database
    executeDbAdminCommand(coll.s.db.admin().s.db, cmd, options, (err, doc) => {
      if (err) return handleCallback(callback, err, null);
      // The server reported a command-level error
      if (doc.errmsg) return handleCallback(callback, toError(doc), null);
      try {
        return handleCallback(
          callback,
          null,
          new Collection(
            coll.s.db,
            coll.s.topology,
            coll.s.namespace.db,
            newName,
            coll.s.pkFactory,
            coll.s.options
          )
        );
      } catch (err) {
        return handleCallback(callback, toError(err), null);
      }
    });
  }
}

module.exports = RenameOperation;
|
||||
54
node_modules/mongodb/lib/operations/replace_one.js
generated
vendored
Normal file
54
node_modules/mongodb/lib/operations/replace_one.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const updateDocuments = require('./common_functions').updateDocuments;
|
||||
const hasAtomicOperators = require('../utils').hasAtomicOperators;
|
||||
|
||||
/**
 * Replaces a single document matching `filter` with `replacement`.
 */
class ReplaceOneOperation extends OperationBase {
  constructor(collection, filter, replacement, options) {
    super(options);

    // A replacement is a full document, not an update spec; operators
    // such as $set are therefore rejected.
    if (hasAtomicOperators(replacement)) {
      throw new TypeError('Replacement document must not contain atomic operators');
    }

    this.collection = collection;
    this.filter = filter;
    this.replacement = replacement;
  }

  execute(callback) {
    const options = this.options;

    // A replace always targets exactly one document.
    options.multi = false;

    updateDocuments(this.collection, this.filter, this.replacement, options, (err, r) =>
      replaceCallback(err, r, this.replacement, callback)
    );
  }
}
|
||||
|
||||
/**
 * Normalizes the raw update result `r` into the CRUD-spec ReplaceOne
 * result shape (modifiedCount, upsertedId, upsertedCount, matchedCount,
 * ops) before handing it to `callback`.
 */
function replaceCallback(err, r, doc, callback) {
  if (callback == null) return;
  if (err && callback) return callback(err);
  if (r == null) return callback(null, { result: { ok: 1 } });

  const upserted = Array.isArray(r.result.upserted) ? r.result.upserted : [];
  const didUpsert = upserted.length > 0;

  r.modifiedCount = r.result.nModified != null ? r.result.nModified : r.result.n;
  // FIXME(major): should be `upserted[0]._id`
  r.upsertedId = didUpsert ? upserted[0] : null;
  r.upsertedCount = upserted.length;
  r.matchedCount = didUpsert ? 0 : r.result.n;
  r.ops = [doc]; // TODO: Should we still have this?
  if (callback) callback(null, r);
}
|
||||
|
||||
// Sole export of this module; `replaceCallback` stays internal.
module.exports = ReplaceOneOperation;
|
||||
19
node_modules/mongodb/lib/operations/run_command.js
generated
vendored
Normal file
19
node_modules/mongodb/lib/operations/run_command.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperationV2 = require('./command_v2');
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
const Aspect = require('./operation').Aspect;
|
||||
|
||||
/**
 * Executes an arbitrary, caller-supplied command verbatim against the
 * selected server. Options are deliberately not inherited from the
 * parent (NO_INHERIT_OPTIONS aspect).
 */
class RunCommandOperation extends CommandOperationV2 {
  constructor(parent, command, options) {
    super(parent, options);
    this.command = command;
  }

  execute(server, callback) {
    this.executeCommand(server, this.command, callback);
  }
}

defineAspects(RunCommandOperation, [Aspect.EXECUTE_WITH_SELECTION, Aspect.NO_INHERIT_OPTIONS]);

module.exports = RunCommandOperation;
|
||||
48
node_modules/mongodb/lib/operations/set_profiling_level.js
generated
vendored
Normal file
48
node_modules/mongodb/lib/operations/set_profiling_level.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperation = require('./command');
|
||||
// The only profiling levels the driver accepts.
const levelValues = new Set(['off', 'slow_only', 'all']);

/**
 * Sets the database profiling level via the `profile` command, mapping
 * the string levels onto the server's numeric scale (0/1/2).
 */
class SetProfilingLevelOperation extends CommandOperation {
  constructor(db, level, options) {
    // 'off' (and anything unrecognized) maps to 0; validation of the
    // level string itself happens in execute().
    let profile = 0;
    if (level === 'slow_only') {
      profile = 1;
    } else if (level === 'all') {
      profile = 2;
    }

    super(db, options);
    this.level = level;
    this.profile = profile;
  }

  _buildCommand() {
    return { profile: this.profile };
  }

  execute(callback) {
    const level = this.level;

    // Fail fast on unknown levels before talking to the server.
    if (!levelValues.has(level)) {
      return callback(new Error('Error: illegal profiling level value ' + level));
    }

    super.execute((err, doc) => {
      if (err == null && doc.ok === 1) return callback(null, level);
      return err != null
        ? callback(err, null)
        : callback(new Error('Error with profile command'), null);
    });
  }
}

module.exports = SetProfilingLevelOperation;
|
||||
45
node_modules/mongodb/lib/operations/stats.js
generated
vendored
Normal file
45
node_modules/mongodb/lib/operations/stats.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
'use strict';
|
||||
|
||||
const Aspect = require('./operation').Aspect;
|
||||
const CommandOperation = require('./command');
|
||||
const defineAspects = require('./operation').defineAspects;
|
||||
|
||||
/**
 * Get all the collection statistics.
 *
 * @class
 * @property {Collection} collection A Collection instance.
 * @property {object} [options] Optional settings. See Collection.prototype.stats for a list of options.
 */
class StatsOperation extends CommandOperation {
  /**
   * Construct a Stats operation.
   *
   * @param {Collection} collection A Collection instance.
   * @param {object} [options] Optional settings. See Collection.prototype.stats for a list of options.
   */
  constructor(collection, options) {
    super(collection.s.db, options, collection);
  }

  _buildCommand() {
    // collStats reports statistics for a single collection; `scale`
    // (when provided) is forwarded as-is to the server.
    const command = { collStats: this.collection.collectionName };

    if (this.options['scale'] != null) {
      command['scale'] = this.options['scale'];
    }

    return command;
  }
}

defineAspects(StatsOperation, Aspect.READ_OPERATION);

module.exports = StatsOperation;
|
||||
34
node_modules/mongodb/lib/operations/update_many.js
generated
vendored
Normal file
34
node_modules/mongodb/lib/operations/update_many.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const updateCallback = require('./common_functions').updateCallback;
|
||||
const updateDocuments = require('./common_functions').updateDocuments;
|
||||
const hasAtomicOperators = require('../utils').hasAtomicOperators;
|
||||
|
||||
/**
 * Applies an update document to every document matching `filter`.
 */
class UpdateManyOperation extends OperationBase {
  constructor(collection, filter, update, options) {
    super(options);

    // An update (unlike a replacement) must be expressed with atomic
    // operators such as $set / $inc.
    if (!hasAtomicOperators(update)) {
      throw new TypeError('Update document requires atomic operators');
    }

    this.collection = collection;
    this.filter = filter;
    this.update = update;
  }

  execute(callback) {
    const options = this.options;

    // Update every matching document.
    options.multi = true;

    updateDocuments(this.collection, this.filter, this.update, options, (err, r) =>
      updateCallback(err, r, callback)
    );
  }
}

module.exports = UpdateManyOperation;
|
||||
49
node_modules/mongodb/lib/operations/update_one.js
generated
vendored
Normal file
49
node_modules/mongodb/lib/operations/update_one.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
const OperationBase = require('./operation').OperationBase;
|
||||
const updateDocuments = require('./common_functions').updateDocuments;
|
||||
const hasAtomicOperators = require('../utils').hasAtomicOperators;
|
||||
|
||||
/**
 * Applies an update document to a single document matching `filter`.
 */
class UpdateOneOperation extends OperationBase {
  constructor(collection, filter, update, options) {
    super(options);

    // An update (unlike a replacement) must be expressed with atomic
    // operators such as $set / $inc.
    if (!hasAtomicOperators(update)) {
      throw new TypeError('Update document requires atomic operators');
    }

    this.collection = collection;
    this.filter = filter;
    this.update = update;
  }

  execute(callback) {
    const options = this.options;

    // Restrict the update to a single document.
    options.multi = false;

    updateDocuments(this.collection, this.filter, this.update, options, (err, r) =>
      updateCallback(err, r, callback)
    );
  }
}
|
||||
|
||||
/**
 * Normalizes the raw update result `r` into the CRUD-spec UpdateOne
 * result shape (modifiedCount, upsertedId, upsertedCount, matchedCount)
 * before invoking `callback`.
 */
function updateCallback(err, r, callback) {
  if (callback == null) return;
  if (err) return callback(err);
  if (r == null) return callback(null, { result: { ok: 1 } });

  const upserted = Array.isArray(r.result.upserted) ? r.result.upserted : [];
  const didUpsert = upserted.length > 0;

  r.modifiedCount = r.result.nModified != null ? r.result.nModified : r.result.n;
  // FIXME(major): should be `upserted[0]._id`
  r.upsertedId = didUpsert ? upserted[0] : null;
  r.upsertedCount = upserted.length;
  r.matchedCount = didUpsert ? 0 : r.result.n;
  callback(null, r);
}
|
||||
|
||||
// Sole export of this module; `updateCallback` stays internal.
module.exports = UpdateOneOperation;
|
||||
39
node_modules/mongodb/lib/operations/validate_collection.js
generated
vendored
Normal file
39
node_modules/mongodb/lib/operations/validate_collection.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
const CommandOperation = require('./command');
|
||||
|
||||
/**
 * Runs the `validate` command against a collection and inspects the
 * response for signs of corruption, surfacing them as errors.
 */
class ValidateCollectionOperation extends CommandOperation {
  constructor(admin, collectionName, options) {
    // Copy every caller-supplied option (except the session) straight
    // onto the command document. Object.keys only yields own keys, so
    // no extra hasOwnProperty guard is needed.
    let command = { validate: collectionName };
    for (const key of Object.keys(options)) {
      if (key !== 'session') {
        command[key] = options[key];
      }
    }

    super(admin.s.db, options, null, command);
    this.collectionName = collectionName;
  }

  execute(callback) {
    const collectionName = this.collectionName;

    super.execute((err, doc) => {
      if (err != null) return callback(err, null);

      if (doc.ok === 0) return callback(new Error('Error with validate command'), null);
      if (doc.result != null && doc.result.constructor !== String)
        return callback(new Error('Error with validation data'), null);
      if (doc.result != null && doc.result.match(/exception|corrupt/) != null)
        return callback(new Error('Error: invalid collection ' + collectionName), null);
      if (doc.valid != null && !doc.valid)
        return callback(new Error('Error: invalid collection ' + collectionName), null);

      return callback(null, doc);
    });
  }
}

module.exports = ValidateCollectionOperation;
|
||||
Reference in New Issue
Block a user