Commit 821afc5e authored by Xavier Guimard's avatar Xavier Guimard

New upstream version 3.1.13+~3.1.11

parent 6df78e12
......@@ -2,6 +2,42 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
<a name="3.1.13"></a>
## [3.1.13](https://github.com/mongodb/node-mongodb-native/compare/v3.1.12...v3.1.13) (2019-01-23)
### Bug Fixes
* restore ability to webpack by removing `makeLazyLoader` ([050267d](https://github.com/mongodb/node-mongodb-native/commit/050267d))
* **bulk:** honor ignoreUndefined in initializeUnorderedBulkOp ([e806be4](https://github.com/mongodb/node-mongodb-native/commit/e806be4))
* **changeStream:** properly handle changeStream event mid-close ([#1902](https://github.com/mongodb/node-mongodb-native/issues/1902)) ([5ad9fa9](https://github.com/mongodb/node-mongodb-native/commit/5ad9fa9))
* **db_ops:** ensure we async resolve errors in createCollection ([210c71d](https://github.com/mongodb/node-mongodb-native/commit/210c71d))
<a name="3.1.12"></a>
## [3.1.12](https://github.com/mongodb/node-mongodb-native/compare/v3.1.11...v3.1.12) (2019-01-16)
### Features
* **core:** update to mongodb-core v3.1.11 ([9bef6e7](https://github.com/mongodb/node-mongodb-native/commit/9bef6e7))
<a name="3.1.11"></a>
## [3.1.11](https://github.com/mongodb/node-mongodb-native/compare/v3.1.10...v3.1.11) (2019-01-15)
### Bug Fixes
* **bulk:** fix error propagation in empty bulk.execute ([a3adb3f](https://github.com/mongodb/node-mongodb-native/commit/a3adb3f))
* **bulk:** make sure that any error in bulk write is propagated ([bedc2d2](https://github.com/mongodb/node-mongodb-native/commit/bedc2d2))
* **bulk:** properly calculate batch size for bulk writes ([aafe71b](https://github.com/mongodb/node-mongodb-native/commit/aafe71b))
* **operations:** do not call require in a hot path ([ff82ff4](https://github.com/mongodb/node-mongodb-native/commit/ff82ff4))
<a name="3.1.10"></a>
## [3.1.10](https://github.com/mongodb/node-mongodb-native/compare/v3.1.9...v3.1.10) (2018-11-16)
......
......@@ -702,6 +702,13 @@ class BulkOperationBase {
const maxWriteBatchSize =
isMaster && isMaster.maxWriteBatchSize ? isMaster.maxWriteBatchSize : 1000;
// Calculates the largest possible size of an Array key, represented as a BSON string
// element. This calculation:
// 1 byte for BSON type
// # of bytes = length of (string representation of (maxWriteBatchSize - 1))
// + 1 byte for the null terminator
const maxKeySize = (maxWriteBatchSize - 1).toString(10).length + 2;
// Final options for retryable writes and write concern
let finalOptions = Object.assign({}, options);
finalOptions = applyRetryableWrites(finalOptions, collection.s.db);
......@@ -745,6 +752,7 @@ class BulkOperationBase {
// Max batch size options
maxBatchSizeBytes: maxBatchSizeBytes,
maxWriteBatchSize: maxWriteBatchSize,
maxKeySize,
// Namespace
namespace: namespace,
// BSON
......
......@@ -12,6 +12,7 @@ const executeOperation = utils.executeOperation;
const MongoWriteConcernError = require('mongodb-core').MongoWriteConcernError;
const handleMongoWriteConcernError = require('./common').handleMongoWriteConcernError;
const bson = common.bson;
const isPromiseLike = require('../utils').isPromiseLike;
/**
* Add to internal list of Operations
......@@ -35,10 +36,12 @@ function addToOperationsList(bulkOperation, docType, document) {
if (bulkOperation.s.currentBatch == null)
bulkOperation.s.currentBatch = new Batch(docType, bulkOperation.s.currentIndex);
const maxKeySize = bulkOperation.s.maxKeySize;
// Check if we need to create a new batch
if (
bulkOperation.s.currentBatchSize + 1 >= bulkOperation.s.maxWriteBatchSize ||
bulkOperation.s.currentBatchSizeBytes + bulkOperation.s.currentBatchSizeBytes >=
bulkOperation.s.currentBatchSizeBytes + maxKeySize + bsonSize >=
bulkOperation.s.maxBatchSizeBytes ||
bulkOperation.s.currentBatch.batchType !== docType
) {
......@@ -51,10 +54,6 @@ function addToOperationsList(bulkOperation, docType, document) {
// Reset the current size trackers
bulkOperation.s.currentBatchSize = 0;
bulkOperation.s.currentBatchSizeBytes = 0;
} else {
// Update current batch size
bulkOperation.s.currentBatchSize = bulkOperation.s.currentBatchSize + 1;
bulkOperation.s.currentBatchSizeBytes = bulkOperation.s.currentBatchSizeBytes + bsonSize;
}
if (docType === common.INSERT) {
......@@ -67,13 +66,14 @@ function addToOperationsList(bulkOperation, docType, document) {
// We have an array of documents
if (Array.isArray(document)) {
throw toError('operation passed in cannot be an Array');
} else {
bulkOperation.s.currentBatch.originalIndexes.push(bulkOperation.s.currentIndex);
bulkOperation.s.currentBatch.operations.push(document);
bulkOperation.s.currentBatchSizeBytes = bulkOperation.s.currentBatchSizeBytes + bsonSize;
bulkOperation.s.currentIndex = bulkOperation.s.currentIndex + 1;
}
bulkOperation.s.currentBatch.originalIndexes.push(bulkOperation.s.currentIndex);
bulkOperation.s.currentBatch.operations.push(document);
bulkOperation.s.currentBatchSize += 1;
bulkOperation.s.currentBatchSizeBytes += maxKeySize + bsonSize;
bulkOperation.s.currentIndex += 1;
// Return bulkOperation
return bulkOperation;
}
......@@ -115,6 +115,10 @@ class OrderedBulkOperation extends BulkOperationBase {
*/
execute(_writeConcern, options, callback) {
const ret = this.bulkExecute(_writeConcern, options, callback);
if (isPromiseLike(ret)) {
return ret;
}
options = ret.options;
callback = ret.callback;
......
......@@ -12,6 +12,7 @@ const executeOperation = utils.executeOperation;
const MongoWriteConcernError = require('mongodb-core').MongoWriteConcernError;
const handleMongoWriteConcernError = require('./common').handleMongoWriteConcernError;
const bson = common.bson;
const isPromiseLike = require('../utils').isPromiseLike;
/**
* Add to internal list of Operations
......@@ -40,6 +41,8 @@ function addToOperationsList(bulkOperation, docType, document) {
bulkOperation.s.currentBatch = bulkOperation.s.currentRemoveBatch;
}
const maxKeySize = bulkOperation.s.maxKeySize;
// Create a new batch object if we don't have a current one
if (bulkOperation.s.currentBatch == null)
bulkOperation.s.currentBatch = new Batch(docType, bulkOperation.s.currentIndex);
......@@ -47,7 +50,8 @@ function addToOperationsList(bulkOperation, docType, document) {
// Check if we need to create a new batch
if (
bulkOperation.s.currentBatch.size + 1 >= bulkOperation.s.maxWriteBatchSize ||
bulkOperation.s.currentBatch.sizeBytes + bsonSize >= bulkOperation.s.maxBatchSizeBytes ||
bulkOperation.s.currentBatch.sizeBytes + maxKeySize + bsonSize >=
bulkOperation.s.maxBatchSizeBytes ||
bulkOperation.s.currentBatch.batchType !== docType
) {
// Save the batch to the execution stack
......@@ -60,12 +64,12 @@ function addToOperationsList(bulkOperation, docType, document) {
// We have an array of documents
if (Array.isArray(document)) {
throw toError('operation passed in cannot be an Array');
} else {
bulkOperation.s.currentBatch.operations.push(document);
bulkOperation.s.currentBatch.originalIndexes.push(bulkOperation.s.currentIndex);
bulkOperation.s.currentIndex = bulkOperation.s.currentIndex + 1;
}
bulkOperation.s.currentBatch.operations.push(document);
bulkOperation.s.currentBatch.originalIndexes.push(bulkOperation.s.currentIndex);
bulkOperation.s.currentIndex = bulkOperation.s.currentIndex + 1;
// Save back the current Batch to the right type
if (docType === common.INSERT) {
bulkOperation.s.currentInsertBatch = bulkOperation.s.currentBatch;
......@@ -80,8 +84,8 @@ function addToOperationsList(bulkOperation, docType, document) {
}
// Update current batch size
bulkOperation.s.currentBatch.size = bulkOperation.s.currentBatch.size + 1;
bulkOperation.s.currentBatch.sizeBytes = bulkOperation.s.currentBatch.sizeBytes + bsonSize;
bulkOperation.s.currentBatch.size += 1;
bulkOperation.s.currentBatch.sizeBytes += maxKeySize + bsonSize;
// Return bulkOperation
return bulkOperation;
......@@ -123,6 +127,10 @@ class UnorderedBulkOperation extends BulkOperationBase {
*/
execute(_writeConcern, options, callback) {
const ret = this.bulkExecute(_writeConcern, options, callback);
if (isPromiseLike(ret)) {
return ret;
}
options = ret.options;
callback = ret.callback;
......@@ -169,9 +177,18 @@ function executeBatch(bulkOperation, batch, options, callback) {
*/
function executeBatches(bulkOperation, options, callback) {
let numberOfCommandsToExecute = bulkOperation.s.batches.length;
let hasErrored = false;
// Execute over all the batches
for (let i = 0; i < bulkOperation.s.batches.length; i++) {
executeBatch(bulkOperation, bulkOperation.s.batches[i], options, function(err) {
if (hasErrored) {
return;
}
if (err) {
hasErrored = true;
return handleCallback(callback, err);
}
// Count down the number of commands left to execute
numberOfCommandsToExecute = numberOfCommandsToExecute - 1;
......
......@@ -368,6 +368,20 @@ function processNewChange(args) {
const change = args.change;
const callback = args.callback;
const eventEmitter = args.eventEmitter || false;
// If the changeStream is closed, then it should not process a change.
if (changeStream.isClosed()) {
// We do not error in the eventEmitter case.
if (eventEmitter) {
return;
}
const error = new MongoError('ChangeStream is closed');
return typeof callback === 'function'
? callback(error, null)
: changeStream.promiseLibrary.reject(error);
}
const topology = changeStream.topology;
const options = changeStream.cursor.options;
......
......@@ -804,7 +804,7 @@ Collection.prototype.updateMany = function(filter, update, options, callback) {
* Updates documents.
* @method
* @param {object} selector The selector for the update operation.
* @param {object} document The update document.
* @param {object} update The update operations to be applied to the documents
* @param {object} [options] Optional settings.
* @param {(number|string)} [options.w] The write concern.
* @param {number} [options.wtimeout] The write concern timeout.
......@@ -820,7 +820,7 @@ Collection.prototype.updateMany = function(filter, update, options, callback) {
* @return {Promise} returns Promise if no callback passed
* @deprecated use updateOne, updateMany or bulkWrite
*/
Collection.prototype.update = deprecate(function(selector, document, options, callback) {
Collection.prototype.update = deprecate(function(selector, update, options, callback) {
if (typeof options === 'function') (callback = options), (options = {});
options = options || {};
......@@ -833,7 +833,7 @@ Collection.prototype.update = deprecate(function(selector, document, options, ca
return executeOperation(this.s.topology, updateDocuments, [
this,
selector,
document,
update,
options,
callback
]);
......@@ -1692,6 +1692,7 @@ Collection.prototype.findAndRemove = deprecate(function(query, sort, options, ca
* @param {boolean} [options.promoteBuffers=false] Promotes Binary BSON values to native Node Buffers.
* @param {object} [options.collation] Specify collation (MongoDB 3.4 or higher) settings for update operation (see 3.4 documentation for available fields).
* @param {string} [options.comment] Add a comment to an aggregation command
* @param {string|object} [options.hint] Add an index selection hint to an aggregation command
* @param {ClientSession} [options.session] optional session to use for this operation
* @param {Collection~aggregationCallback} callback The command result callback
* @return {(null|AggregationCursor)}
......@@ -2047,11 +2048,17 @@ Collection.prototype.mapReduce = function(map, reduce, options, callback) {
* @param {(number|string)} [options.w] The write concern.
* @param {number} [options.wtimeout] The write concern timeout.
* @param {boolean} [options.j=false] Specify a journal write concern.
* @param {boolean} [options.ignoreUndefined=false] Specify if the BSON serializer should ignore undefined fields.
* @param {ClientSession} [options.session] optional session to use for this operation
* @return {UnorderedBulkOperation}
*/
Collection.prototype.initializeUnorderedBulkOp = function(options) {
  options = options || {};
  options.promiseLibrary = this.s.promiseLibrary;

  // An explicit per-call ignoreUndefined takes precedence; only fall back
  // to the collection-level option when the caller did not supply one.
  if (options.ignoreUndefined == null) {
    options.ignoreUndefined = this.s.options.ignoreUndefined;
  }

  return unordered(this.s.topology, this, options);
};
......@@ -2065,11 +2072,16 @@ Collection.prototype.initializeUnorderedBulkOp = function(options) {
* @param {number} [options.wtimeout] The write concern timeout.
* @param {boolean} [options.j=false] Specify a journal write concern.
* @param {ClientSession} [options.session] optional session to use for this operation
* @param {boolean} [options.ignoreUndefined=false] Specify if the BSON serializer should ignore undefined fields.
* @param {OrderedBulkOperation} callback The command result callback
* @return {null}
*/
Collection.prototype.initializeOrderedBulkOp = function(options) {
  options = options || {};
  options.promiseLibrary = this.s.promiseLibrary;

  // An explicit per-call ignoreUndefined takes precedence; only fall back
  // to the collection-level option when the caller did not supply one.
  if (options.ignoreUndefined == null) {
    options.ignoreUndefined = this.s.options.ignoreUndefined;
  }

  return ordered(this.s.topology, this, options);
};
......
......@@ -22,6 +22,21 @@ const MongoError = require('mongodb-core').MongoError;
const ReadPreference = require('mongodb-core').ReadPreference;
const toError = require('../utils').toError;
let collection;
// Lazily resolve the Collection constructor, caching it so the
// `require` call runs at most once (keeps it out of hot paths).
function loadCollection() {
  if (collection) {
    return collection;
  }
  collection = require('../collection');
  return collection;
}
let db;
function loadDb() {
if (!db) {
db = require('../db');
}
return db;
}
/**
* Group function helper
* @ignore
......@@ -987,7 +1002,7 @@ function mapReduce(coll, map, reduce, options, callback) {
if (result.result != null && typeof result.result === 'object') {
const doc = result.result;
// Return a collection from another db
const Db = require('../db');
let Db = loadDb();
collection = new Db(doc.db, coll.s.db.s.topology, coll.s.db.s.options).collection(
doc.collection
);
......@@ -1204,7 +1219,7 @@ function removeDocuments(coll, selector, options, callback) {
* @param {Collection~collectionResultCallback} [callback] The results callback
*/
function rename(coll, newName, options, callback) {
const Collection = require('../collection');
let Collection = loadCollection();
// Check the collection name
checkCollectionName(newName);
// Build the command
......
......@@ -6,6 +6,14 @@ const handleCallback = require('../utils').handleCallback;
const MongoError = require('mongodb-core').MongoError;
const push = Array.prototype.push;
let cursor;
// Lazily resolve the Cursor constructor, caching it so the
// `require` call runs at most once (keeps it out of hot paths).
function loadCursor() {
  if (cursor) {
    return cursor;
  }
  cursor = require('../cursor');
  return cursor;
}
/**
* Get the count of documents for this cursor.
*
......@@ -74,7 +82,7 @@ function count(cursor, applySkipLimit, opts, callback) {
* @param {Cursor~resultCallback} callback The result callback.
*/
function each(cursor, callback) {
const Cursor = require('../cursor');
let Cursor = loadCursor();
if (!callback) throw MongoError.create({ message: 'callback is mandatory', driver: true });
if (cursor.isNotified()) return;
......@@ -114,7 +122,7 @@ function each(cursor, callback) {
* @param {Cursor~resultCallback} [callback] The result callback.
*/
function hasNext(cursor, callback) {
const Cursor = require('../cursor');
let Cursor = loadCursor();
if (cursor.s.currentDoc) {
return callback(null, true);
......@@ -165,7 +173,7 @@ function next(cursor, callback) {
// Get the next available document from the cursor, returns null if no more documents are available.
function nextObject(cursor, callback) {
const Cursor = require('../cursor');
let Cursor = loadCursor();
if (cursor.s.state === Cursor.CLOSED || (cursor.isDead && cursor.isDead()))
return handleCallback(
......@@ -196,7 +204,7 @@ function nextObject(cursor, callback) {
* @param {Cursor~toArrayResultCallback} [callback] The result callback.
*/
function toArray(cursor, callback) {
const Cursor = require('../cursor');
let Cursor = loadCursor();
const items = [];
......
......@@ -17,6 +17,21 @@ const findOne = require('./collection_ops').findOne;
const remove = require('./collection_ops').remove;
const updateOne = require('./collection_ops').updateOne;
let collection;
// Lazily resolve the Collection constructor, caching it so the
// `require` call runs at most once (keeps it out of hot paths).
function loadCollection() {
  if (collection) {
    return collection;
  }
  collection = require('../collection');
  return collection;
}
let db;
function loadDb() {
if (!db) {
db = require('../db');
}
return db;
}
const debugFields = [
'authSource',
'w',
......@@ -64,7 +79,7 @@ const illegalCommandFields = [
* @param {Db~resultCallback} [callback] The command result callback
*/
function addUser(db, username, password, options, callback) {
const Db = require('../db');
let Db = loadDb();
// Did the user destroy the topology
if (db.serverConfig && db.serverConfig.isDestroyed())
......@@ -132,7 +147,7 @@ function addUser(db, username, password, options, callback) {
* @param {Db~collectionsResultCallback} [callback] The results callback
*/
function collections(db, options, callback) {
const Collection = require('../collection');
let Collection = loadCollection();
options = Object.assign({}, options, { nameOnly: true });
// Let's get the collection names
......@@ -172,7 +187,7 @@ function collections(db, options, callback) {
* @param {Db~collectionResultCallback} [callback] The results callback
*/
function createCollection(db, name, options, callback) {
const Collection = require('../collection');
let Collection = loadCollection();
// Get the write concern options
const finalOptions = applyWriteConcern(Object.assign({}, options), { db }, options);
......@@ -233,11 +248,16 @@ function createCollection(db, name, options, callback) {
// Execute command
executeCommand(db, cmd, finalOptions, err => {
if (err) return handleCallback(callback, err);
handleCallback(
callback,
null,
new Collection(db, db.s.topology, db.s.databaseName, name, db.s.pkFactory, options)
);
try {
return handleCallback(
callback,
null,
new Collection(db, db.s.topology, db.s.databaseName, name, db.s.pkFactory, options)
);
} catch (err) {
return handleCallback(callback, err);
}
});
});
}
......@@ -632,7 +652,7 @@ function profilingLevel(db, options, callback) {
* @param {Db~resultCallback} [callback] The command result callback
*/
function removeUser(db, username, options, callback) {
const Db = require('../db');
let Db = loadDb();
// Attempt to execute command
executeAuthRemoveUserCommand(db, username, options, (err, result) => {
......
......@@ -11,6 +11,14 @@ const ReplSet = require('../topologies/replset');
const Server = require('../topologies/server');
const ServerSessionPool = require('mongodb-core').Sessions.ServerSessionPool;
let client;
// Lazily resolve the MongoClient constructor, caching it so the
// `require` call runs at most once (keeps it out of hot paths).
function loadClient() {
  if (client) {
    return client;
  }
  client = require('../mongo_client');
  return client;
}
const monitoringEvents = [
'timeout',
'close',
......@@ -127,7 +135,7 @@ function clearAllEvents(topology) {
// Collect all events in order from SDAM
function collectEvents(mongoClient, topology) {
const MongoClient = require('../mongo_client');
let MongoClient = loadClient();
const collectedEvents = [];
if (mongoClient instanceof MongoClient) {
......
......@@ -604,7 +604,10 @@ function decorateWithCollation(command, target, options) {
* @param {object} command the command on which to apply the read concern
* @param {Collection} coll the parent collection of the operation calling this method
*/
function decorateWithReadConcern(command, coll) {
function decorateWithReadConcern(command, coll, options) {
if (options && options.session && options.session.inTransaction()) {
return;
}
let readConcern = Object.assign({}, command.readConcern || {});
if (coll.s.readConcern) {
Object.assign(readConcern, coll.s.readConcern);
......
......@@ -2,6 +2,27 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
<a name="3.1.11"></a>
## [3.1.11](https://github.com/mongodb-js/mongodb-core/compare/v3.1.10...v3.1.11) (2019-01-16)
### Bug Fixes
* **wire-protocol:** don't allow override of `slaveOk` ([8fcef69](https://github.com/mongodb-js/mongodb-core/commit/8fcef69))
<a name="3.1.10"></a>
## [3.1.10](https://github.com/mongodb-js/mongodb-core/compare/v3.1.9...v3.1.10) (2019-01-15)
### Bug Fixes
* **mongos-replset:** pass connect options to child server instances ([7ffb4bb](https://github.com/mongodb-js/mongodb-core/commit/7ffb4bb))
* **prettier:** fix prettier file paths for Windows ([00c631e](https://github.com/mongodb-js/mongodb-core/commit/00c631e))
<a name="3.1.9"></a>
## [3.1.9](https://github.com/mongodb-js/mongodb-core/compare/v3.1.8...v3.1.9) (2018-11-16)
......
......@@ -90,8 +90,7 @@ const extractCommand = command => {
}
Object.keys(LEGACY_FIND_OPTIONS_MAP).forEach(key => {
if (typeof command.options[key] !== 'undefined')
result[LEGACY_FIND_OPTIONS_MAP[key]] = command.options[key];
if (typeof command[key] !== 'undefined') result[LEGACY_FIND_OPTIONS_MAP[key]] = command[key];
});
OP_QUERY_KEYS.forEach(key => {
......
......@@ -22,7 +22,7 @@ var OPTS_EXHAUST = 64;
var OPTS_PARTIAL = 128;
// Response flags
var CURSOR_NOT_FOUND = 0;
var CURSOR_NOT_FOUND = 1;
var QUERY_FAILURE = 2;
var SHARD_CONFIG_STALE = 4;
var AWAIT_CAPABLE = 8;
......@@ -46,9 +46,6 @@ var Query = function(bson, ns, query, options) {
this.ns = ns;
this.query = query;
// Ensure empty options
this.options = options || {};
// Additional options
this.numberToSkip = options.numberToSkip || 0;
this.numberToReturn = options.numberToReturn || 0;
......
......@@ -8,8 +8,8 @@ const mongoErrorContextSymbol = require('./error').mongoErrorContextSymbol;
const f = require('util').format;
const collationNotSupported = require('./utils').collationNotSupported;
var BSON = retrieveBSON(),
Long = BSON.Long;
const BSON = retrieveBSON();
const Long = BSON.Long;
/**
* This is a cursor results callback
......@@ -105,7 +105,8 @@ var Cursor = function(bson, ns, cmd, options, topology, topologyOptions) {
batchSize: options.batchSize || cmd.batchSize || 1000,
currentLimit: 0,
// Result field name if not a cursor (contains the array of results)
transforms: options.transforms
transforms: options.transforms,
raw: options.raw || (cmd && cmd.raw)
};
if (typeof options.session === 'object') {
......@@ -209,129 +210,9 @@ var handleCallback = function(callback, err, result) {
};
// Internal methods
// Issue the cursor's initial query and populate cursorState from the reply.
// Handles both command-style replies (a single document carrying a `cursor`
// or `result` field) and legacy OP_QUERY-style replies with a cursorId plus
// a documents array. Calls back with (err) on failure or (null, rawResult).
Cursor.prototype._find = function(callback) {
  var self = this;

  if (self.logger.isDebug()) {
    self.logger.debug(
      f(
        'issue initial query [%s] with flags [%s]',
        JSON.stringify(self.cmd),
        JSON.stringify(self.query)
      )
    );
  }

  var queryCallback = function(err, r) {
    if (err) return callback(err);

    // Get the raw message
    var result = r.message;

    // Query failure bit set
    if (result.queryFailure) {
      return callback(new MongoError(result.documents[0]), null);
    }

    // Check if we have a command cursor: exactly one reply document, not a
    // plain `find` (unless marked virtual), and the document looks like a
    // command response (cursor subdocument, error fields, or result array).
    if (
      Array.isArray(result.documents) &&
      result.documents.length === 1 &&
      (!self.cmd.find || (self.cmd.find && self.cmd.virtual === false)) &&
      (typeof result.documents[0].cursor !== 'string' ||
        result.documents[0]['$err'] ||
        result.documents[0]['errmsg'] ||
        Array.isArray(result.documents[0].result))
    ) {
      // We have an error document; return the error
      if (result.documents[0]['$err'] || result.documents[0]['errmsg']) {
        return callback(new MongoError(result.documents[0]), null);
      }

      // We have a cursor document
      if (result.documents[0].cursor != null && typeof result.documents[0].cursor !== 'string') {
        var id = result.documents[0].cursor.id;
        // If we have a namespace change set the new namespace for getmores
        if (result.documents[0].cursor.ns) {
          self.ns = result.documents[0].cursor.ns;
        }
        // Promote id to long if needed
        self.cursorState.cursorId = typeof id === 'number' ? Long.fromNumber(id) : id;
        self.cursorState.lastCursorId = self.cursorState.cursorId;
        self.cursorState.operationTime = result.documents[0].operationTime;

        // If we have a firstBatch set it
        if (Array.isArray(result.documents[0].cursor.firstBatch)) {
          self.cursorState.documents = result.documents[0].cursor.firstBatch; //.reverse();
        }

        // Return after processing command cursor
        return callback(null, result);
      }

      // Command replied with a bare `result` array (no server-side cursor);
      // treat the array as the complete, already-exhausted result set.
      if (Array.isArray(result.documents[0].result)) {
        self.cursorState.documents = result.documents[0].result;
        self.cursorState.cursorId = Long.ZERO;
        return callback(null, result);
      }
    }

    // Otherwise fall back to regular find path
    self.cursorState.cursorId = result.cursorId;
    self.cursorState.documents = result.documents;
    self.cursorState.lastCursorId = result.cursorId;

    // Transform the results with passed in transformation method if provided
    if (self.cursorState.transforms && typeof self.cursorState.transforms.query === 'function') {
      self.cursorState.documents = self.cursorState.transforms.query(result);
    }

    // Return callback
    callback(null, result);
  };

  // Options passed to the pool
  var queryOptions = {};

  // If we have a raw query decorate the function
  if (self.options.raw || self.cmd.raw) {
    // queryCallback.raw = self.options.raw || self.cmd.raw;
    queryOptions.raw = self.options.raw || self.cmd.raw;
  }

  // Do we have documentsReturnedIn set on the query
  if (typeof self.query.documentsReturnedIn === 'string') {
    // queryCallback.documentsReturnedIn = self.query.documentsReturnedIn;
    queryOptions.documentsReturnedIn = self.query.documentsReturnedIn;
  }

  // Add promote Long value if defined
  if (typeof self.cursorState.promoteLongs === 'boolean') {
    queryOptions.promoteLongs = self.cursorState.promoteLongs;
  }

  // Add promote values if defined
  if (typeof self.cursorState.promoteValues === 'boolean') {
    queryOptions.promoteValues = self.cursorState.promoteValues;
  }

  // Add promote buffers if defined
  if (typeof self.cursorState.promoteBuffers === 'boolean') {
    queryOptions.promoteBuffers = self.cursorState.promoteBuffers;
  }

  if (typeof self.cursorState.session === 'object') {
    queryOptions.session = self.cursorState.session;
  }

  // Write the initial command out
  self.server.s.pool.write(self.query, queryOptions, queryCallback);
};
Cursor.prototype._getmore = function(callback) {
if (this.logger.isDebug())
this.logger.debug(f('schedule getMore call for query [%s]', JSON.stringify(this.query)));
// Determine if it's a raw query
var raw = this.options.raw || this.cmd.raw;
// Set the current batchSize
var batchSize = this.cursorState.batchSize;
......@@ -342,17 +223,11 @@ Cursor.prototype._getmore = function(callback) {
batchSize = this.cursorState.limit - this.cursorState.currentLimit;
}
// Default pool
var pool = this.server.s.pool;