Commit 5e112f87 authored by Xavier Guimard's avatar Xavier Guimard

New upstream version 3.2.2+~3.2.2

parent 821afc5e
......@@ -2,6 +2,56 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
<a name="3.2.2"></a>
## [3.2.2](https://github.com/mongodb/node-mongodb-native/compare/v3.2.1...v3.2.2) (2019-03-22)
### Bug Fixes
* **asyncIterator:** stronger guard against importing async generator ([e0826fb](https://github.com/mongodb/node-mongodb-native/commit/e0826fb))
### Features
* update to mongodb-core v3.2.2 ([868cfc3](https://github.com/mongodb/node-mongodb-native/commit/868cfc3))
<a name="3.2.1"></a>
## [3.2.1](https://github.com/mongodb/node-mongodb-native/compare/v3.2.0...v3.2.1) (2019-03-21)
### Features
* **core:** update to mongodb-core v3.2.1 ([30b0100](https://github.com/mongodb/node-mongodb-native/commit/30b0100))
<a name="3.2.0"></a>
# [3.2.0](https://github.com/mongodb/node-mongodb-native/compare/v3.1.13...v3.2.0) (2019-03-21)
### Bug Fixes
* **aggregate:** do not send batchSize for aggregation with $out ([ddb8d90](https://github.com/mongodb/node-mongodb-native/commit/ddb8d90))
* **bulkWrite:** always count undefined values in bson size for bulk ([436d340](https://github.com/mongodb/node-mongodb-native/commit/436d340))
* **db_ops:** rename db to add user on ([79931af](https://github.com/mongodb/node-mongodb-native/commit/79931af))
* **mongo_client_ops:** only skip authentication if no authMechanism is specified ([3b6957d](https://github.com/mongodb/node-mongodb-native/commit/3b6957d))
* **mongo-client:** ensure close callback is called with client ([f39e881](https://github.com/mongodb/node-mongodb-native/commit/f39e881))
### Features
* **core:** pin to mongodb-core v3.2.0 ([22af15a](https://github.com/mongodb/node-mongodb-native/commit/22af15a))
* **Cursor:** adds support for AsyncIterator in cursors ([b972c1e](https://github.com/mongodb/node-mongodb-native/commit/b972c1e))
* **db:** add database-level aggregation ([b629b21](https://github.com/mongodb/node-mongodb-native/commit/b629b21))
* **mongo-client:** remove deprecated `logout` and print warning ([542859d](https://github.com/mongodb/node-mongodb-native/commit/542859d))
* **topology-base:** support passing callbacks to `close` method ([7c111e0](https://github.com/mongodb/node-mongodb-native/commit/7c111e0))
* **transactions:** support pinning mongos for sharded txns ([3886127](https://github.com/mongodb/node-mongodb-native/commit/3886127))
* **unified-sdam:** backport unified SDAM to master for v3.2.0 ([79f33ca](https://github.com/mongodb/node-mongodb-native/commit/79f33ca))
<a name="3.1.13"></a>
## [3.1.13](https://github.com/mongodb/node-mongodb-native/compare/v3.1.12...v3.1.13) (2019-01-23)
......@@ -222,6 +272,19 @@ All notable changes to this project will be documented in this file. See [standa
<a name="3.1.1"></a>
## [3.1.1](https://github.com/mongodb/node-mongodb-native/compare/v3.1.0...v3.1.1) (2018-07-05)
### Bug Fixes
* **client-ops:** return transform map to map rather than function ([b8b4bfa](https://github.com/mongodb/node-mongodb-native/commit/b8b4bfa))
* **collection:** correctly shallow clone passed in options ([2e6c4fa](https://github.com/mongodb/node-mongodb-native/commit/2e6c4fa))
* **collection:** countDocuments throws error when query doesn't match docs ([4e83556](https://github.com/mongodb/node-mongodb-native/commit/4e83556))
* **server:** remove unnecessary print statement ([20e11b3](https://github.com/mongodb/node-mongodb-native/commit/20e11b3))
<a name="3.1.0"></a>
# [3.1.0](https://github.com/mongodb/node-mongodb-native/compare/v3.0.6...v3.1.0) (2018-06-27)
......
......@@ -40,6 +40,13 @@ Core Server (i.e. SERVER) project are **public**.
Change history can be found in [`HISTORY.md`](HISTORY.md).
### Compatibility
For version compatibility matrices, please refer to the following links:
* [MongoDB](https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#reference-compatibility-mongodb-node)
* [NodeJS](https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#reference-compatibility-language-node)
# Installation
The recommended way to get started with the Node.js 3.0 driver is to use `npm` (the Node Package Manager) to install the dependency in your project.
......
<a name="1.1.1"></a>
## [1.1.1](https://github.com/mongodb/js-bson/compare/v1.1.0...v1.1.1) (2019-03-08)
### Bug Fixes
* **object-id:** support 4.x->1.x interop for MinKey and ObjectId ([53419a5](https://github.com/mongodb/js-bson/commit/53419a5))
### Features
* replace new Buffer with modern versions ([24aefba](https://github.com/mongodb/js-bson/commit/24aefba))
<a name="1.1.0"></a>
# [1.1.0](https://github.com/mongodb/js-bson/compare/v1.0.9...v1.1.0) (2018-08-13)
......
This diff is collapsed.
......@@ -9,6 +9,8 @@ if (typeof global !== 'undefined') {
var Buffer = require('buffer').Buffer; // TODO just use global Buffer
}
var utils = require('./parser/utils');
/**
* A class representation of the BSON Binary type.
*
......@@ -53,7 +55,7 @@ function Binary(buffer, subType) {
if (typeof buffer === 'string') {
// Different ways of writing the length of the string for the different types
if (typeof Buffer !== 'undefined') {
this.buffer = new Buffer(buffer);
this.buffer = utils.toBuffer(buffer);
} else if (
typeof Uint8Array !== 'undefined' ||
Object.prototype.toString.call(buffer) === '[object Array]'
......@@ -68,7 +70,7 @@ function Binary(buffer, subType) {
this.position = buffer.length;
} else {
if (typeof Buffer !== 'undefined') {
this.buffer = new Buffer(Binary.BUFFER_SIZE);
this.buffer = utils.allocBuffer(Binary.BUFFER_SIZE);
} else if (typeof Uint8Array !== 'undefined') {
this.buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE));
} else {
......@@ -107,7 +109,7 @@ Binary.prototype.put = function put(byte_value) {
} else {
if (typeof Buffer !== 'undefined' && Buffer.isBuffer(this.buffer)) {
// Create additional overflow buffer
var buffer = new Buffer(Binary.BUFFER_SIZE + this.buffer.length);
var buffer = utils.allocBuffer(Binary.BUFFER_SIZE + this.buffer.length);
// Combine the two buffers together
this.buffer.copy(buffer, 0, 0, this.buffer.length);
this.buffer = buffer;
......@@ -150,7 +152,7 @@ Binary.prototype.write = function write(string, offset) {
var buffer = null;
// If we are in node.js
if (typeof Buffer !== 'undefined' && Buffer.isBuffer(this.buffer)) {
buffer = new Buffer(this.buffer.length + string.length);
buffer = utils.allocBuffer(this.buffer.length + string.length);
this.buffer.copy(buffer, 0, 0, this.buffer.length);
} else if (Object.prototype.toString.call(this.buffer) === '[object Uint8Array]') {
// Create a new buffer
......
......@@ -18,7 +18,8 @@ var Map = require('./map'),
// Parts of the parser
var deserialize = require('./parser/deserializer'),
serializer = require('./parser/serializer'),
calculateObjectSize = require('./parser/calculate_size');
calculateObjectSize = require('./parser/calculate_size'),
utils = require('./parser/utils');
/**
* @ignore
......@@ -28,7 +29,7 @@ var deserialize = require('./parser/deserializer'),
var MAXSIZE = 1024 * 1024 * 17;
// Current Internal Temporary Serialization Buffer
var buffer = new Buffer(MAXSIZE);
var buffer = utils.allocBuffer(MAXSIZE);
var BSON = function() {};
......@@ -56,7 +57,7 @@ BSON.prototype.serialize = function serialize(object, options) {
// Resize the internal serialization buffer if needed
if (buffer.length < minInternalBufferSize) {
buffer = new Buffer(minInternalBufferSize);
buffer = utils.allocBuffer(minInternalBufferSize);
}
// Attempt to serialize
......@@ -71,7 +72,7 @@ BSON.prototype.serialize = function serialize(object, options) {
[]
);
// Create the final buffer
var finishedBuffer = new Buffer(serializationIndex);
var finishedBuffer = utils.allocBuffer(serializationIndex);
// Copy into the finished buffer
buffer.copy(finishedBuffer, 0, 0, finishedBuffer.length);
// Return the buffer
......
......@@ -70,6 +70,8 @@ var INF_POSITIVE_BUFFER = [
var EXPONENT_REGEX = /^([-+])?(\d+)?$/;
var utils = require('./parser/utils');
// Detect if the value is a digit
var isDigit = function(value) {
return !isNaN(parseInt(value, 10));
......@@ -143,7 +145,7 @@ var lessThan = function(left, right) {
};
// var longtoHex = function(value) {
// var buffer = new Buffer(8);
// var buffer = utils.allocBuffer(8);
// var index = 0;
// // Encode the low 64 bits of the decimal
// // Encode low bits
......@@ -160,7 +162,7 @@ var lessThan = function(left, right) {
// };
// var int32toHex = function(value) {
// var buffer = new Buffer(4);
// var buffer = utils.allocBuffer(4);
// var index = 0;
// // Encode the low 64 bits of the decimal
// // Encode low bits
......@@ -265,9 +267,9 @@ Decimal128.fromString = function(string) {
// Check if user passed Infinity or NaN
if (!isDigit(string[index]) && string[index] !== '.') {
if (string[index] === 'i' || string[index] === 'I') {
return new Decimal128(new Buffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
return new Decimal128(utils.toBuffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
} else if (string[index] === 'N') {
return new Decimal128(new Buffer(NAN_BUFFER));
return new Decimal128(utils.toBuffer(NAN_BUFFER));
}
}
......@@ -275,7 +277,7 @@ Decimal128.fromString = function(string) {
while (isDigit(string[index]) || string[index] === '.') {
if (string[index] === '.') {
if (sawRadix) {
return new Decimal128(new Buffer(NAN_BUFFER));
return new Decimal128(utils.toBuffer(NAN_BUFFER));
}
sawRadix = true;
......@@ -320,7 +322,7 @@ Decimal128.fromString = function(string) {
// No digits read
if (!match || !match[2]) {
return new Decimal128(new Buffer(NAN_BUFFER));
return new Decimal128(utils.toBuffer(NAN_BUFFER));
}
// Get exponent
......@@ -332,7 +334,7 @@ Decimal128.fromString = function(string) {
// Return not a number
if (string[index]) {
return new Decimal128(new Buffer(NAN_BUFFER));
return new Decimal128(utils.toBuffer(NAN_BUFFER));
}
// Done reading input
......@@ -380,7 +382,7 @@ Decimal128.fromString = function(string) {
exponent = EXPONENT_MAX;
break;
} else {
return new Decimal128(new Buffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
return new Decimal128(utils.toBuffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
}
}
......@@ -412,7 +414,7 @@ Decimal128.fromString = function(string) {
exponent = EXPONENT_MAX;
break;
} else {
return new Decimal128(new Buffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
return new Decimal128(utils.toBuffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER));
}
}
}
......@@ -462,7 +464,7 @@ Decimal128.fromString = function(string) {
digits[dIdx] = 1;
} else {
return new Decimal128(
new Buffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER)
utils.toBuffer(isNegative ? INF_NEGATIVE_BUFFER : INF_POSITIVE_BUFFER)
);
}
}
......@@ -547,7 +549,7 @@ Decimal128.fromString = function(string) {
}
// Encode into a buffer
var buffer = new Buffer(16);
var buffer = utils.allocBuffer(16);
index = 0;
// Encode the low 64 bits of the decimal
......
// Custom inspect property name / symbol.
var inspect = 'inspect';
var utils = require('./parser/utils');
/**
* Machine id.
*
......@@ -58,7 +60,7 @@ var ObjectID = function ObjectID(id) {
'Argument passed in must be a single String of 12 bytes or a string of 24 hex characters'
);
} else if (valid && typeof id === 'string' && id.length === 24 && hasBufferType) {
return new ObjectID(new Buffer(id, 'hex'));
return new ObjectID(utils.toBuffer(id, 'hex'));
} else if (valid && typeof id === 'string' && id.length === 24) {
return ObjectID.createFromHexString(id);
} else if (id != null && id.length === 12) {
......@@ -158,7 +160,7 @@ ObjectID.prototype.generate = function(time) {
: process.pid) % 0xffff;
var inc = this.get_inc();
// Buffer used
var buffer = new Buffer(12);
var buffer = utils.allocBuffer(12);
// Encode time
buffer[3] = time & 0xff;
buffer[2] = (time >> 8) & 0xff;
......@@ -277,7 +279,7 @@ ObjectID.createPk = function createPk() {
* @return {ObjectID} return the created ObjectID
*/
ObjectID.createFromTime = function createFromTime(time) {
var buffer = new Buffer([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
var buffer = utils.toBuffer([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// Encode time into first 4 bytes
buffer[3] = time & 0xff;
buffer[2] = (time >> 8) & 0xff;
......@@ -315,7 +317,7 @@ ObjectID.createFromHexString = function createFromHexString(string) {
}
// Use Buffer.from method if available
if (hasBufferType) return new ObjectID(new Buffer(string, 'hex'));
if (hasBufferType) return new ObjectID(utils.toBuffer(string, 'hex'));
// Calculate lengths
var array = new _Buffer(12);
......
......@@ -92,7 +92,7 @@ function calculateElement(name, value, serializeFunctions, isArray, ignoreUndefi
value['_bsontype'] === 'MaxKey'
) {
return (name != null ? Buffer.byteLength(name, 'utf8') + 1 : 0) + 1;
} else if (value instanceof ObjectID || value['_bsontype'] === 'ObjectID') {
} else if (value instanceof ObjectID || value['_bsontype'] === 'ObjectID' || value['_bsontype'] === 'ObjectId') {
return (name != null ? Buffer.byteLength(name, 'utf8') + 1 : 0) + (12 + 1);
} else if (value instanceof Date || isDate(value)) {
return (name != null ? Buffer.byteLength(name, 'utf8') + 1 : 0) + (8 + 1);
......
......@@ -14,6 +14,8 @@ var Long = require('../long').Long,
BSONRegExp = require('../regexp').BSONRegExp,
Binary = require('../binary').Binary;
var utils = require('./utils');
var deserialize = function(buffer, options, isArray) {
options = options == null ? {} : options;
var index = options && options.index ? options.index : 0;
......@@ -115,7 +117,7 @@ var deserializeObject = function(buffer, index, options, isArray) {
object[name] = buffer.toString('utf8', index, index + stringSize - 1);
index = index + stringSize;
} else if (elementType === BSON.BSON_DATA_OID) {
var oid = new Buffer(12);
var oid = utils.allocBuffer(12);
buffer.copy(oid, 0, index, index + 12);
object[name] = new ObjectID(oid);
index = index + 12;
......@@ -220,7 +222,7 @@ var deserializeObject = function(buffer, index, options, isArray) {
}
} else if (elementType === BSON.BSON_DATA_DECIMAL128) {
// Buffer to contain the decimal bytes
var bytes = new Buffer(16);
var bytes = utils.allocBuffer(16);
// Copy the next 16 bytes into the bytes buffer
buffer.copy(bytes, 0, index, index + 16);
// Update index
......@@ -520,7 +522,7 @@ var deserializeObject = function(buffer, index, options, isArray) {
index = index + stringSize;
// Read the oid
var oidBuffer = new Buffer(12);
var oidBuffer = utils.allocBuffer(12);
buffer.copy(oidBuffer, 0, index, index + 12);
oid = new ObjectID(oidBuffer);
......
......@@ -2,7 +2,6 @@
var writeIEEE754 = require('../float_parser').writeIEEE754,
Long = require('../long').Long,
MinKey = require('../min_key').MinKey,
Map = require('../map'),
Binary = require('../binary').Binary;
......@@ -251,7 +250,7 @@ var serializeMinMax = function(buffer, key, value, index, isArray) {
// Write the type of either min or max key
if (value === null) {
buffer[index++] = BSON.BSON_DATA_NULL;
} else if (value instanceof MinKey) {
} else if (value._bsontype === 'MinKey') {
buffer[index++] = BSON.BSON_DATA_MIN_KEY;
} else {
buffer[index++] = BSON.BSON_DATA_MAX_KEY;
......@@ -718,7 +717,7 @@ var serializeInto = function serializeInto(
index = serializeNull(buffer, key, value, index, true);
} else if (value === null) {
index = serializeNull(buffer, key, value, index, true);
} else if (value['_bsontype'] === 'ObjectID') {
} else if (value['_bsontype'] === 'ObjectID' || value['_bsontype'] === 'ObjectId') {
index = serializeObjectId(buffer, key, value, index, true);
} else if (Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index, true);
......@@ -826,7 +825,7 @@ var serializeInto = function serializeInto(
// } else if (value === undefined && ignoreUndefined === true) {
} else if (value === null || (value === undefined && ignoreUndefined === false)) {
index = serializeNull(buffer, key, value, index);
} else if (value['_bsontype'] === 'ObjectID') {
} else if (value['_bsontype'] === 'ObjectID' || value['_bsontype'] === 'ObjectId') {
index = serializeObjectId(buffer, key, value, index);
} else if (Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
......@@ -928,7 +927,7 @@ var serializeInto = function serializeInto(
if (ignoreUndefined === false) index = serializeNull(buffer, key, value, index);
} else if (value === null) {
index = serializeNull(buffer, key, value, index);
} else if (value['_bsontype'] === 'ObjectID') {
} else if (value['_bsontype'] === 'ObjectID' || value['_bsontype'] === 'ObjectId') {
index = serializeObjectId(buffer, key, value, index);
} else if (Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
......
......@@ -8,7 +8,21 @@ function normalizedFunctionString(fn) {
return fn.toString().replace(/function *\(/, 'function (');
}
// Legacy fallback: delegates to the deprecated `new Buffer(...)` constructor.
// Only selected (in module.exports below) on old Node.js versions that lack
// `Buffer.alloc` / `Buffer.from`.
function newBuffer(item, encoding) {
  return new Buffer(item, encoding);
}

// Safe, zero-filled allocation; forwards all arguments to `Buffer.alloc`.
function allocBuffer() {
  return Buffer.alloc.apply(Buffer, arguments);
}

// Safe construction from existing data (string/array/ArrayBuffer/Buffer);
// forwards all arguments to `Buffer.from`.
function toBuffer() {
  return Buffer.from.apply(Buffer, arguments);
}
module.exports = {
normalizedFunctionString: normalizedFunctionString
normalizedFunctionString: normalizedFunctionString,
allocBuffer: typeof Buffer.alloc === 'function' ? allocBuffer : newBuffer,
toBuffer: typeof Buffer.from === 'function' ? toBuffer : newBuffer
};
......@@ -12,7 +12,7 @@
"browser_build",
"bower.json"
],
"version": "1.1.0",
"version": "1.1.1",
"author": "Christian Amor Kvalheim <christkv@gmail.com>",
"contributors": [],
"repository": "mongodb/js-bson",
......
......@@ -4,6 +4,8 @@ const inherits = require('util').inherits;
const MongoError = require('mongodb-core').MongoError;
const Readable = require('stream').Readable;
const CoreCursor = require('./cursor');
const deprecate = require('util').deprecate;
const SUPPORTS = require('./utils').SUPPORTS;
/**
* @fileOverview The **AggregationCursor** class is an internal class that embodies an aggregation cursor on MongoDB
......@@ -129,6 +131,11 @@ inherits(AggregationCursor, Readable);
for (var name in CoreCursor.prototype) {
AggregationCursor.prototype[name] = CoreCursor.prototype[name];
}
if (SUPPORTS.ASYNC_ITERATOR) {
AggregationCursor.prototype[
Symbol.asyncIterator
] = require('./async/async_iterator').asyncIterator;
}
/**
* Set the batch size for the cursor.
......@@ -153,10 +160,10 @@ AggregationCursor.prototype.batchSize = function(value) {
* @param {object} document The geoNear stage document.
* @return {AggregationCursor}
*/
AggregationCursor.prototype.geoNear = function(document) {
AggregationCursor.prototype.geoNear = deprecate(function(document) {
this.s.cmd.pipeline.push({ $geoNear: document });
return this;
};
}, 'The `$geoNear` stage is deprecated in MongoDB 4.0, and removed in version 4.2.');
/**
* Add a group stage to the aggregation pipeline
......
{
"parserOptions": {
"ecmaVersion": 2018
}
}
'use strict';
/**
 * Async-generator implementation of the async iterator protocol for cursors.
 * Must be invoked with a cursor as `this` (it is attached to cursor
 * prototypes as `[Symbol.asyncIterator]`).
 *
 * Pulls documents one at a time via `this.next()` and yields each one.
 * When the cursor is exhausted (`next()` resolves falsy), the cursor is
 * closed before iteration ends.
 */
async function* asyncIterator() {
  for (;;) {
    const doc = await this.next();
    if (doc) {
      yield doc;
      continue;
    }
    // Exhausted: release the server-side cursor before finishing.
    await this.close();
    return;
  }
}
exports.asyncIterator = asyncIterator;
'use strict';
var shallowClone = require('./utils').shallowClone,
handleCallback = require('./utils').handleCallback,
MongoError = require('mongodb-core').MongoError,
f = require('util').format;
/**
 * Perform authentication against the client's topology.
 *
 * Resolves the database to authenticate against (dbName, overridden by
 * `authdb`, overridden by `authSource`), maps the requested authMechanism
 * to a core auth provider, and invokes `client.topology.auth`. On success
 * an 'authenticated' event is emitted when the client has listeners for it.
 *
 * @param {MongoClient} client The client being authenticated
 * @param {string} username
 * @param {string} password
 * @param {object} options Settings (`authMechanism`, `dbName`, `authdb`, `authSource`)
 * @param {function} callback Called with (err, result)
 */
var authenticate = function(client, username, password, options, callback) {
  // Did the user destroy the topology?
  if (client.topology && client.topology.isDestroyed())
    return callback(new MongoError('topology was destroyed'));

  // The default db to authenticate against is 'self'; when called from a
  // retry context it may be another one, such as admin.
  var authdb = options.dbName;
  if (options.authdb) authdb = options.authdb;
  if (options.authSource) authdb = options.authSource;

  // Success path: emit the 'authenticated' event (only when someone is
  // listening), then hand the result back to the caller.
  var _callback = function(err, result) {
    if (client.listeners('authenticated').length > 0) {
      client.emit('authenticated', err, result);
    }
    handleCallback(callback, err, result);
  };

  // Shared completion handler for every topology.auth call below.
  var onAuth = function(err) {
    if (err) return handleCallback(callback, err, false);
    _callback(null, true);
  };

  // Normalize the mechanism name for dispatch.
  var authMechanism = (options.authMechanism || '').toUpperCase();

  if (authMechanism === 'GSSAPI') {
    // Kerberos needs the full options bag; Windows uses SSPI instead.
    var kerberosProvider = process.platform === 'win32' ? 'sspi' : 'gssapi';
    client.topology.auth(kerberosProvider, authdb, username, password, options, onAuth);
    return;
  }

  // All remaining mechanisms share the same call shape.
  var providerByMechanism = {
    'MONGODB-CR': 'mongocr',
    'PLAIN': 'plain',
    'MONGODB-X509': 'x509',
    'SCRAM-SHA-1': 'scram-sha-1',
    'SCRAM-SHA-256': 'scram-sha-256',
    'DEFAULT': 'default'
  };

  var provider = providerByMechanism[authMechanism];
  if (provider) {
    client.topology.auth(provider, authdb, username, password, onAuth);
    return;
  }

  handleCallback(
    callback,
    MongoError.create({
      message: f('authentication mechanism %s not supported', options.authMechanism),
      driver: true
    })
  );
};
module.exports = function(self, username, password, options, callback) {
if (typeof options === 'function') (callback = options), (options = {});
options = options || {};
// Shallow copy the options
options = shallowClone(options);
// Set default mechanism
if (!options.authMechanism) {
options.authMechanism = 'DEFAULT';
} else if (
options.authMechanism !== 'GSSAPI' &&
options.authMechanism !== 'DEFAULT' &&
options.authMechanism !== 'MONGODB-CR' &&
options.authMechanism !== 'MONGODB-X509' &&
options.authMechanism !== 'SCRAM-SHA-1' &&
options.authMechanism !== 'SCRAM-SHA-256' &&
options.authMechanism !== 'PLAIN'
) {
return handleCallback(
callback,
MongoError.create({
message:
'only DEFAULT, GSSAPI, PLAIN, MONGODB-X509, or SCRAM-SHA-1 is supported by authMechanism',
driver: true
})
);
}
// If we have a callback fallback
if (typeof callback === 'function')
return authenticate(self, username, password, options, function(err, r) {
// Support failed auth method
if (err && err.message && err.message.indexOf('saslStart') !== -1) err.code = 59;
// Reject error
if (err) return callback(err, r);
callback(null, r);
});
// Return a promise
return new self.s.promiseLibrary(function(resolve, reject) {
authenticate(self, username, password, options, function(err, r) {
// Support failed auth method
if (err && err.message && err.message.indexOf('saslStart') !== -1) err.code = 59;
// Reject error
if (err) return reject(err);
resolve(r);
});
});
};
......@@ -25,7 +25,11 @@ const isPromiseLike = require('../utils').isPromiseLike;
function addToOperationsList(bulkOperation, docType, document) {
// Get the bsonSize
const bsonSize = bson.calculateObjectSize(document, {
checkKeys: false
checkKeys: false,
// Since we don't know what the user selected for BSON options here,
// err on the safe side, and check the size with ignoreUndefined: false.
ignoreUndefined: false
});
// Throw error if the doc is bigger than the max BSON size
......
......@@ -25,7 +25,11 @@ const isPromiseLike = require('../utils').isPromiseLike;
function addToOperationsList(bulkOperation, docType, document) {
// Get the bsonSize
const bsonSize = bson.calculateObjectSize(document, {
checkKeys: false
checkKeys: false,
// Since we don't know what the user selected for BSON options here,
// err on the safe side, and check the size with ignoreUndefined: false.
ignoreUndefined: false
});
// Throw error if the doc is bigger than the max BSON size
if (bsonSize >= bulkOperation.s.maxBatchSizeBytes)
......
......@@ -4,7 +4,6 @@ const deprecate = require('util').deprecate;
const deprecateOptions = require('./utils').deprecateOptions;
const checkCollectionName = require('./utils').checkCollectionName;
const ObjectID = require('mongodb-core').BSON.ObjectID;
const AggregationCursor = require('./aggregation_cursor');
const MongoError = require('mongodb-core').MongoError;
const toError = require('./utils').toError;
const normalizeHintField = require('./utils').normalizeHintField;
......@@ -19,10 +18,10 @@ const unordered = require('./bulk/unordered');
const ordered = require('./bulk/ordered');
const ChangeStream = require('./change_stream');
const executeOperation = require('./utils').executeOperation;
const applyWriteConcern = require('./utils').applyWriteConcern;
const resolveReadPreference = require('./utils').resolveReadPreference;
// Operations