diff --git a/.eslintrc.js b/.eslintrc.js
index 002d1e7b8b9..91b38166932 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -15,7 +15,8 @@ module.exports = {
     '!.*',
     'node_modules',
     '.git',
-    'data'
+    'data',
+    '.config'
   ],
   overrides: [
     {
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 5afe4965aeb..e0783644cfb 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -26,7 +26,7 @@ jobs:
         with:
           fetch-depth: 0
       - name: Setup node
-        uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
        with:
          node-version: 22
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 134fbe5ed5e..3b049913c15 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -31,7 +31,7 @@ jobs:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
@@ -52,7 +52,7 @@ jobs:
     - run: git fetch --depth=1 --tags # download all tags for documentation
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
diff --git a/.github/workflows/encryption-tests.yml b/.github/workflows/encryption-tests.yml
index 3c0fbc65b8a..332f920271b 100644
--- a/.github/workflows/encryption-tests.yml
+++ b/.github/workflows/encryption-tests.yml
@@ -24,9 +24,9 @@ jobs:
     env:
       FORCE_COLOR: true
     steps:
-      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Setup node
-        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4
+        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
        with:
          node-version: 22
       - name: Install Dependencies
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index f78c8c495a4..cfe007d51bb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -25,7 +25,7 @@ jobs:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
@@ -61,7 +61,7 @@ jobs:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: ${{ matrix.node }}
@@ -96,7 +96,7 @@ jobs:
     steps:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
     - name: Load MongoDB binary cache
@@ -124,7 +124,7 @@ jobs:
     steps:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
     - run: npm install
diff --git a/.github/workflows/tidelift-alignment.yml b/.github/workflows/tidelift-alignment.yml
index 552493a7cbc..ad58f3a2b61 100644
--- a/.github/workflows/tidelift-alignment.yml
+++ b/.github/workflows/tidelift-alignment.yml
@@ -17,7 +17,7 @@ jobs:
     - name: Checkout
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
     - name: Alignment
diff --git a/.github/workflows/tsd.yml b/.github/workflows/tsd.yml
index 672bd36229f..556aa31e21f 100644
--- a/.github/workflows/tsd.yml
+++ b/.github/workflows/tsd.yml
@@ -23,7 +23,7 @@ jobs:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
@@ -41,7 +41,7 @@ jobs:
     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
     - name: Setup node
-      uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
+      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4.2.0
      with:
        node-version: 22
diff --git a/.gitignore b/.gitignore
index 9a52110981e..ac921cdc28c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -70,3 +70,4 @@ list.out
 data
 
 *.pid
+mo-expansion*
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 62f7c005fdf..7ece54ba807 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,68 @@
+8.12.1 / 2025-03-04
+===================
+ * fix: match bson version with mongodb's bson version #15297 [hasezoey](https://github.com/hasezoey)
+
+8.12.0 / 2025-03-03
+===================
+ * feat: bump mongodb driver to 6.14
+ * feat: expose "SchemaTypeOptions" in browser #15277 [hasezoey](https://github.com/hasezoey)
+ * docs: update field-level-encryption.md #15272 [dphrag](https://github.com/dphrag)
+
+8.11.0 / 2025-02-26
+===================
+ * feat(model): make bulkWrite results include MongoDB bulk write errors as well as validation errors #15271 #15265
+ * feat(document): add schemaFieldsOnly option to toObject() and toJSON() #15259 #15218
+ * feat: introduce populate ordered option for populating in series rather than in parallel for transactions #15239 #15231 #15210
+ * fix(bigint): throw error when casting BigInt that's outside of the bounds of what MongoDB can safely store #15230 #15200
+
+8.10.2 / 2025-02-25
+===================
+ * fix(model+connection): return MongoDB BulkWriteResult instance even if no valid ops #15266 #15265
+ * fix(debug): avoid printing trusted symbol in debug output #15267 #15263
+ * types: make type inference logic resilient to no Buffer type due to missing @types/node #15261
+
+8.10.1 / 2025-02-14
+===================
+ * perf(document): only call undoReset() 1x/document #15257 #15255
+ * perf(schema): clear childSchemas when overwriting existing path to avoid performance degradations #15256 #15253
+ * perf: some more micro optimizations for find() and findOne() #14906 #15250
+ * fix(model): avoid adding timeout on Model.init() buffering to avoid unintentional dangling open handles #15251 #15241
+ * fix: avoid connection buffering on init if autoCreate: false #15247 #15241
+ * fix: infer discriminator key if set in $set with overwriteDiscriminatorKey #15243 #15218
+ * types(middleware): make this in document middleware the hydrated doc type, not raw doc type #15246 #15242
+ * types(schema): support options parameter to Schema.prototype.discriminator() #15249 #15244
+ * types(schema): allow calling Schema.prototype.number() with no message arg #15237 #15236
+ * docs(typescript): recommend using HydratedSingleSubdocument over Types.Subdocument #15240 #15211
+
+8.10.0 / 2025-02-05
+===================
+ * feat(schema+schematype): add toJSONSchema() method to convert schemas and schematypes to JSON schema #15184 #11162
+ * feat(connection): make connection helpers respect bufferTimeoutMS #15229 #15201
+ * feat(document): support schematype-level transform option #15163 #15084
+ * feat(model): add insertOne() function to insert a single doc #15162 #14843
+ * feat(connection): support Connection.prototype.aggregate() for db-level aggregations #15153
+ * feat(model): make syncIndexes() not call createIndex() on indexes that already exist #15175 #12250
+ * feat(model): useConnection(connection) function #14802
+ * fix(model): disallow updateMany(update) and fix TypeScript types re: updateMany() #15199 #15190
+ * fix(collection): avoid buffering if creating a collection during a connection interruption #15187 #14971
+ * fix(model): throw error if calling create() with multiple docs in a transaction unless ordered: true #15100
+ * fix(model): skip createCollection() in syncIndexes() if autoCreate: false #15155
+ * fix(model): make `hydrate()` handle hydrating deeply nested populated docs with hydratedPopulatedDocs #15130
+ * types(document): make sure toObject() and toJSON() apply versionKey __v #15097
+ * ci(NODE-6505): CI Setup for Encryption Support #15139 [aditi-khare-mongoDB](https://github.com/aditi-khare-mongoDB)
+
+8.9.7 / 2025-02-04
+==================
+ * fix: avoid applying defaults on map embedded paths #15217 #15196
+ * types: add missing $median operator to aggregation types #15233 #15209
+ * docs(document): clarify that toObject() returns a POJO that may contain non-POJO values #15232 #15208
+
+8.9.6 / 2025-01-31
+==================
+ * fix(document): allow setting values to undefined with set(obj) syntax with strict: false #15207 #15192
+ * fix(schema): improve reason for UUID cast error, currently a TypeError #15215 #15202
+ * fix(aggregate): improve error when calling near() with invalid coordinates #15206 #15188
+
 7.8.6 / 2025-01-20
 ===================
  * chore: remove coverage output from bundle
@@ -94,8 +159,6 @@
  * fix: disallow using $where in match
  * perf: cache results from getAllSubdocs() on saveOptions, only loop through known subdoc properties #15055 #15029
  * fix(model+query): support overwriteDiscriminatorKey for bulkWrite updateOne and updateMany, allow inferring discriminator key from update #15046 #15040
-=======
->>>>>>> 7.x
 
 7.8.3 / 2024-11-26
 ==================
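For context on the `toJSONSchema()` entry in 8.10.0 above, usage might look like the following sketch. The schema definition and the logged output are illustrative assumptions, not part of this patch; the exact shape of the generated JSON schema is determined by Mongoose.

```javascript
const mongoose = require('mongoose');

// Hypothetical schema, purely for illustration.
const userSchema = new mongoose.Schema({ name: String, age: Number });

// New in 8.10.0: describe the schema as JSON schema, e.g. for use
// with MongoDB $jsonSchema validators.
console.log(userSchema.toJSONSchema());
```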
diff --git a/docs/field-level-encryption.md b/docs/field-level-encryption.md
index 3531fca0218..828bcd36664 100644
--- a/docs/field-level-encryption.md
+++ b/docs/field-level-encryption.md
@@ -39,9 +39,8 @@ Keep in mind that the following example is a simple example to help you get started.
 The encryption key in the following example is insecure; MongoDB recommends using a [KMS](https://www.mongodb.com/docs/v5.0/core/security-client-side-encryption-key-management/).
 
 ```javascript
-const { ClientEncryption } = require('mongodb-client-encryption');
+const { ClientEncryption } = require('mongodb');
 const mongoose = require('mongoose');
-const { Binary } = require('mongodb');
 
 run().catch(err => console.log(err));
 
@@ -66,12 +65,14 @@ async function run() {
       kmsProviders
     }
   }).asPromise();
-  const encryption = new ClientEncryption(conn.client, {
+  const encryption = new ClientEncryption(conn.getClient(), {
     keyVaultNamespace,
     kmsProviders,
   });
 
-  const _key = await encryption.createDataKey('local');
+  const _key = await encryption.createDataKey('local', {
+    keyAltNames: ['exampleKeyName'],
+  });
 }
 ```
@@ -112,3 +113,51 @@ With the above connection, if you create a model named 'Test' that uses the 'test' collection:
 
 const Model = mongoose.model('Test', mongoose.Schema({ name: String }));
 await Model.create({ name: 'super secret' });
 ```
+
+## Automatic FLE in Mongoose
+
+Mongoose supports the declaration of encrypted schemas - schemas that, when connected to a model, utilize MongoDB's Client Side
+Field Level Encryption or Queryable Encryption under the hood. Mongoose automatically generates either an `encryptedFieldsMap` or a
+`schemaMap` when instantiating a MongoClient and encrypts fields on write and decrypts fields on reads.
+
+### Encryption types
+
+MongoDB has two different automatic encryption implementations: client side field level encryption (CSFLE) and queryable encryption (QE).
+See [choosing an in-use encryption approach](https://www.mongodb.com/docs/v7.3/core/queryable-encryption/about-qe-csfle/#choosing-an-in-use-encryption-approach).
+
+### Declaring Encrypted Schemas
+
+The following schema declares two properties, `name` and `ssn`. `ssn` is encrypted using queryable encryption, and
+is configured for equality queries:
+
+```javascript
+const encryptedUserSchema = new Schema({
+  name: String,
+  ssn: {
+    type: String,
+    // 1
+    encrypt: {
+      keyId: '',
+      queries: 'equality'
+    }
+  }
+  // 2
+}, { encryptionType: 'queryableEncryption' });
+```
+
+To declare a field as encrypted, you must:
+
+1. Annotate the field with encryption metadata in the schema definition
+2. Choose an encryption type for the schema and configure the schema for the encryption type
+
+Not all schematypes are supported for CSFLE and QE. For an overview of valid schema types, refer to MongoDB's documentation.
+
+### Registering Models
+
+Encrypted schemas must be registered on a connection, not the Mongoose global:
+
+```javascript
+
+const connection = mongoose.createConnection();
+const UserModel = connection.model('User', encryptedUserSchema);
+```
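To tie the new "Automatic FLE in Mongoose" docs above together, a minimal end-to-end sketch might look like this. The connection string, key vault namespace, and locally generated key are assumptions for illustration; a real deployment would use a KMS and a data key id created via `ClientEncryption#createDataKey()`.

```javascript
const crypto = require('crypto');
const mongoose = require('mongoose');
const { UUID } = require('bson');

// Insecure local key, for demonstration only.
const localKey = crypto.randomBytes(96);

const encryptedUserSchema = new mongoose.Schema({
  name: String,
  ssn: {
    type: String,
    // In practice this would be a data key id from createDataKey(),
    // not a freshly generated UUID.
    encrypt: { keyId: new UUID(), queries: 'equality' }
  }
}, { encryptionType: 'queryableEncryption' });

// Encrypted schemas must be registered on a connection, not the Mongoose global.
const connection = mongoose.createConnection('mongodb://127.0.0.1:27017/test', {
  autoEncryption: {
    keyVaultNamespace: 'keyvault.datakeys',
    kmsProviders: { local: { key: localKey } }
  }
});
const UserModel = connection.model('User', encryptedUserSchema);
```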
diff --git a/docs/schematypes.md b/docs/schematypes.md
index 062b3251311..748832418da 100644
--- a/docs/schematypes.md
+++ b/docs/schematypes.md
@@ -654,16 +654,16 @@ typeof question.answer; // 'bigint'
 ### Double {#double}
 
 Mongoose supports [64-bit IEEE 754-2008 floating point numbers](https://en.wikipedia.org/wiki/IEEE_754-2008_revision) as a SchemaType.
-Int32s are stored as [BSON type "double" in MongoDB](https://www.mongodb.com/docs/manual/reference/bson-types/).
+Doubles are stored as [BSON type "double" in MongoDB](https://www.mongodb.com/docs/manual/reference/bson-types/).
 
 ```javascript
-const studentsSchema = new Schema({
-  id: Int32
+const temperatureSchema = new Schema({
+  celsius: Double
 });
-const Student = mongoose.model('Student', schema);
+const Temperature = mongoose.model('Temperature', temperatureSchema);
 
-const student = new Temperature({ celsius: 1339 });
-typeof student.id; // 'number'
+const temperature = new Temperature({ celsius: 1339 });
+temperature.celsius instanceof bson.Double; // true
 ```
 
 There are several types of values that will be successfully cast to a Double.
@@ -673,7 +673,7 @@ new Temperature({ celsius: '1.2e12' }).celsius; // 15 as a Double
 new Temperature({ celsius: true }).celsius; // 1 as a Double
 new Temperature({ celsius: false }).celsius; // 0 as a Double
 new Temperature({ celsius: { valueOf: () => 83.0033 } }).celsius; // 83 as a Double
-new Temperature({ celsius: '' }).celsius; // null as a Double
+new Temperature({ celsius: '' }).celsius; // null
 ```
 
 The following inputs will all result in a [CastError](validation.html#cast-errors) once validated, meaning that it will not throw on initialization, only when validated:
@@ -688,12 +688,12 @@ ### Int32 {#int32}
 Mongoose supports 32-bit integers as a SchemaType.
 Int32s are stored as [32-bit integers in MongoDB (BSON type "int")](https://www.mongodb.com/docs/manual/reference/bson-types/).
 
 ```javascript
-const studentsSchema = new Schema({
+const studentSchema = new Schema({
   id: Int32
 });
-const Student = mongoose.model('Student', schema);
+const Student = mongoose.model('Student', studentSchema);
 
-const student = new Temperature({ celsius: 1339 });
+const student = new Student({ id: 1339 });
 typeof student.id; // 'number'
 ```
diff --git a/docs/typescript/subdocuments.md b/docs/typescript/subdocuments.md
index 49edbb4ca27..797a53fcc12 100644
--- a/docs/typescript/subdocuments.md
+++ b/docs/typescript/subdocuments.md
@@ -38,9 +38,11 @@ Define a separate `THydratedDocumentType` and pass it as the 5th generic param
 `THydratedDocumentType` controls what type Mongoose uses for "hydrated documents", that is, what `await UserModel.findOne()`, `UserModel.hydrate()`, and `new UserModel()` return.
 
 ```ts
+import { HydratedSingleSubdocument } from 'mongoose';
+
 // Define property overrides for hydrated documents
 type THydratedUserDocument = {
-  names?: mongoose.Types.Subdocument
+  names?: HydratedSingleSubdocument
 }
 
 type UserModelType = mongoose.Model;
@@ -51,6 +53,7 @@
 const UserModel = mongoose.model('User', userSchema);
 
 const doc = new UserModel({ names: { _id: '0'.repeat(24), firstName: 'foo' } });
 doc.names!.ownerDocument(); // Works, `names` is a subdocument!
+doc.names!.firstName; // 'foo'
 ```
 
 ## Subdocument Arrays
@@ -81,4 +84,5 @@ const UserModel = model('User', new Schema
diff --git a/lib/aggregate.js b/lib/aggregate.js
--- a/lib/aggregate.js
+++ b/lib/aggregate.js
 * @param {Object|Array} arg.near GeoJSON point or coordinates array
 * @return {Aggregate}
 * @api public
 */
 
 Aggregate.prototype.near = function(arg) {
+  if (arg == null) {
+    throw new MongooseError('Aggregate `near()` must be called with non-nullish argument');
+  }
+  if (arg.near == null) {
+    throw new MongooseError('Aggregate `near()` argument must have a `near` property');
+  }
+  const coordinates = Array.isArray(arg.near) ? arg.near : arg.near.coordinates;
+  if (typeof arg.near === 'object' && (!Array.isArray(coordinates) || coordinates.length < 2 || coordinates.find(c => typeof c !== 'number'))) {
+    throw new MongooseError(`Aggregate \`near()\` argument has invalid coordinates, got "${coordinates}"`);
+  }
+
   const op = {};
   op.$geoNear = arg;
   return this.append(op);
diff --git a/lib/browser.js b/lib/browser.js
index 12b0cbde653..a01c9187b0d 100644
--- a/lib/browser.js
+++ b/lib/browser.js
@@ -83,6 +83,15 @@ exports.VirtualType = require('./virtualType');
 
 exports.SchemaType = require('./schemaType.js');
 
+/**
+ * The constructor used for schematype options
+ *
+ * @method SchemaTypeOptions
+ * @api public
+ */
+
+exports.SchemaTypeOptions = require('./options/schemaTypeOptions');
+
 /**
  * Internal utils
  *
diff --git a/lib/cast/bigint.js b/lib/cast/bigint.js
index 20e01ae58b1..c046ba0f00a 100644
--- a/lib/cast/bigint.js
+++ b/lib/cast/bigint.js
@@ -1,6 +1,5 @@
 'use strict';
 
-const assert = require('assert');
 const { Long } = require('bson');
 
 /**
@@ -13,6 +12,10 @@ const { Long } = require('bson');
  * @api private
  */
 
+const MAX_BIGINT = 9223372036854775807n;
+const MIN_BIGINT = -9223372036854775808n;
+const ERROR_MESSAGE = `Mongoose only supports BigInts between ${MIN_BIGINT} and ${MAX_BIGINT} because MongoDB does not support arbitrary precision integers`;
+
 module.exports = function castBigInt(val) {
   if (val == null) {
     return val;
@@ -21,6 +24,9 @@ module.exports = function castBigInt(val) {
     return null;
   }
   if (typeof val === 'bigint') {
+    if (val > MAX_BIGINT || val < MIN_BIGINT) {
+      throw new Error(ERROR_MESSAGE);
+    }
     return val;
   }
 
@@ -29,8 +35,12 @@ module.exports = function castBigInt(val) {
   }
 
   if (typeof val === 'string' || typeof val === 'number') {
-    return BigInt(val);
+    val = BigInt(val);
+    if (val > MAX_BIGINT || val < MIN_BIGINT) {
+      throw new Error(ERROR_MESSAGE);
+    }
+    return val;
   }
 
-  assert.ok(false);
+  throw new Error(`Cannot convert value to BigInt: "${val}"`);
 };
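The bounds guard added to `castBigInt()` above means out-of-range BigInts now fail fast instead of being silently mangled at the BSON layer. A quick sketch of the observable behavior; requiring the internal module path directly here is purely for illustration and is an internal detail that may change:

```javascript
const castBigInt = require('mongoose/lib/cast/bigint');

castBigInt('42'); // 42n
castBigInt(9223372036854775807n); // ok: fits in a signed 64-bit integer

// Throws: Mongoose only supports BigInts between -9223372036854775808 and
// 9223372036854775807 because MongoDB does not support arbitrary precision integers
castBigInt(9223372036854775808n);
```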
diff --git a/lib/cast/uuid.js b/lib/cast/uuid.js
new file mode 100644
index 00000000000..6e296bf3e24
--- /dev/null
+++ b/lib/cast/uuid.js
@@ -0,0 +1,78 @@
+'use strict';
+
+const MongooseBuffer = require('../types/buffer');
+
+const UUID_FORMAT = /[0-9a-f]{8}-[0-9a-f]{4}-[0-9][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i;
+const Binary = MongooseBuffer.Binary;
+
+module.exports = function castUUID(value) {
+  if (value == null) {
+    return value;
+  }
+
+  function newBuffer(initbuff) {
+    const buff = new MongooseBuffer(initbuff);
+    buff._subtype = 4;
+    return buff;
+  }
+
+  if (typeof value === 'string') {
+    if (UUID_FORMAT.test(value)) {
+      return stringToBinary(value);
+    } else {
+      throw new Error(`"${value}" is not a valid UUID string`);
+    }
+  }
+
+  if (Buffer.isBuffer(value)) {
+    return newBuffer(value);
+  }
+
+  if (value instanceof Binary) {
+    return newBuffer(value.value(true));
+  }
+
+  // Re: gh-647 and gh-3030, we're ok with casting using `toString()`
+  // **unless** its the default Object.toString, because "[object Object]"
+  // doesn't really qualify as useful data
+  if (value.toString && value.toString !== Object.prototype.toString) {
+    if (UUID_FORMAT.test(value.toString())) {
+      return stringToBinary(value.toString());
+    }
+  }
+
+  throw new Error(`"${value}" cannot be casted to a UUID`);
+};
+
+module.exports.UUID_FORMAT = UUID_FORMAT;
+
+/**
+ * Helper function to convert the input hex-string to a buffer
+ * @param {String} hex The hex string to convert
+ * @returns {Buffer} The hex as buffer
+ * @api private
+ */
+
+function hex2buffer(hex) {
+  // use buffer built-in function to convert from hex-string to buffer
+  const buff = hex != null && Buffer.from(hex, 'hex');
+  return buff;
+}
+
+/**
+ * Convert a String to Binary
+ * @param {String} uuidStr The value to process
+ * @returns {MongooseBuffer} The binary to store
+ * @api private
+ */
+
+function stringToBinary(uuidStr) {
+  // Protect against undefined & throwing err
+  if (typeof uuidStr !== 'string') uuidStr = '';
+  const hex = uuidStr.replace(/[{}-]/g, ''); // remove extra characters
+  const bytes = hex2buffer(hex);
+  const buff = new MongooseBuffer(bytes);
+  buff._subtype = 4;
+
+  return buff;
+}
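A sketch of how the new `castUUID()` helper behaves, based on the code above; again, requiring the internal path is purely illustrative:

```javascript
const castUUID = require('mongoose/lib/cast/uuid');

// Valid UUID strings become MongooseBuffers with BSON binary subtype 4.
const bin = castUUID('09190f70-3d30-11e5-8814-0f4df9a59c41');
console.log(bin._subtype); // 4

// Anything that doesn't match UUID_FORMAT now throws a descriptive Error
// instead of the previous opaque TypeError.
castUUID('not-a-uuid'); // Error: "not-a-uuid" is not a valid UUID string
```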
diff --git a/lib/collection.js b/lib/collection.js
index e6c365c9a13..9ab67b992c7 100644
--- a/lib/collection.js
+++ b/lib/collection.js
@@ -81,7 +81,7 @@ Collection.prototype.onOpen = function() {
  * @api private
  */
 
-Collection.prototype.onClose = function() {};
+Collection.prototype.onClose = function() { };
 
 /**
  * Queues a method for later execution when its
@@ -311,13 +311,7 @@ Collection.prototype._getBufferTimeoutMS = function _getBufferTimeoutMS() {
   if (opts && opts.schemaUserProvidedOptions != null && opts.schemaUserProvidedOptions.bufferTimeoutMS != null) {
     return opts.schemaUserProvidedOptions.bufferTimeoutMS;
   }
-  if (conn.config.bufferTimeoutMS != null) {
-    return conn.config.bufferTimeoutMS;
-  }
-  if (conn.base != null && conn.base.get('bufferTimeoutMS') != null) {
-    return conn.base.get('bufferTimeoutMS');
-  }
-  return 10000;
+  return conn._getBufferTimeoutMS();
 };
 
 /*!
diff --git a/lib/connection.js b/lib/connection.js
index b747460083c..0c19b74cc11 100644
--- a/lib/connection.js
+++ b/lib/connection.js
@@ -23,6 +23,7 @@ const CreateCollectionsError = require('./error/createCollectionsError');
 const castBulkWrite = require('./helpers/model/castBulkWrite');
 const { modelSymbol } = require('./helpers/symbols');
 const isPromise = require('./helpers/isPromise');
+const decorateBulkWriteResult = require('./helpers/model/decorateBulkWriteResult');
 const arrayAtomicsSymbol = require('./helpers/symbols').arrayAtomicsSymbol;
 const sessionNewDocuments = require('./helpers/symbols').sessionNewDocuments;
 
@@ -559,7 +560,9 @@ Connection.prototype.bulkWrite = async function bulkWrite(ops, options) {
         'bulkWrite'
       );
     }
-    return getDefaultBulkwriteResult();
+    const BulkWriteResult = this.base.driver.get().BulkWriteResult;
+    const res = new BulkWriteResult(getDefaultBulkwriteResult(), false);
+    return decorateBulkWriteResult(res, validationErrors, results);
   }
 
   let error;
@@ -567,16 +570,17 @@ Connection.prototype.bulkWrite = async function bulkWrite(ops, options) {
     then(res => ([res, null])).
     catch(err => ([null, err]));
 
+  for (let i = 0; i < validOpIndexes.length; ++i) {
+    results[validOpIndexes[i]] = null;
+  }
   if (error) {
     if (validationErrors.length > 0) {
+      decorateBulkWriteResult(error, validationErrors, results);
       error.mongoose = error.mongoose || {};
       error.mongoose.validationErrors = validationErrors;
     }
   }
 
-  for (let i = 0; i < validOpIndexes.length; ++i) {
-    results[validOpIndexes[i]] = null;
-  }
   if (validationErrors.length > 0) {
     if (options.throwOnValidationError) {
       throw new MongooseBulkWriteError(
@@ -586,9 +590,7 @@ Connection.prototype.bulkWrite = async function bulkWrite(ops, options) {
         'bulkWrite'
       );
     } else {
-      res.mongoose = res.mongoose || {};
-      res.mongoose.validationErrors = validationErrors;
-      res.mongoose.results = results;
+      decorateBulkWriteResult(res, validationErrors, results);
     }
   }
 }
@@ -607,7 +609,7 @@ Connection.prototype.bulkWrite = async function bulkWrite(ops, options) {
 Connection.prototype.createCollections = async function createCollections(options = {}) {
   const result = {};
-  const errorsMap = { };
+  const errorsMap = {};
   const { continueOnError } = options;
   delete options.continueOnError;
 
@@ -734,7 +736,7 @@ Connection.prototype.transaction = function transaction(fn, options) {
         throw err;
       }).
       finally(() => {
-        session.endSession().catch(() => {});
+        session.endSession().catch(() => { });
       });
   });
 };
@@ -818,18 +820,71 @@ Connection.prototype.dropCollection = async function dropCollection(collection) {
 /**
  * Waits for connection to be established, so the connection has a `client`
  *
+ * @param {Boolean} [noTimeout=false] if set, don't put a timeout on the operation. Used internally so `mongoose.model()` doesn't leave open handles.
  * @return Promise
  * @api private
  */
 
-Connection.prototype._waitForConnect = async function _waitForConnect() {
+Connection.prototype._waitForConnect = async function _waitForConnect(noTimeout) {
   if ((this.readyState === STATES.connecting || this.readyState === STATES.disconnected) && this._shouldBufferCommands()) {
-    await new Promise(resolve => {
-      this._queue.push({ fn: resolve });
+    const bufferTimeoutMS = this._getBufferTimeoutMS();
+    let timeout = null;
+    let timedOut = false;
+    // The element that this function pushes onto `_queue`, stored to make it easy to remove later
+    const queueElement = {};
+
+    // Mongoose executes all elements in `_queue` when initial connection succeeds in `onOpen()`.
+    const waitForConnectPromise = new Promise(resolve => {
+      queueElement.fn = resolve;
+      this._queue.push(queueElement);
     });
+
+    if (noTimeout) {
+      await waitForConnectPromise;
+    } else {
+      await Promise.race([
+        waitForConnectPromise,
+        new Promise(resolve => {
+          timeout = setTimeout(
+            () => {
+              timedOut = true;
+              resolve();
+            },
+            bufferTimeoutMS
+          );
+        })
+      ]);
+    }
+
+    if (timedOut) {
+      const index = this._queue.indexOf(queueElement);
+      if (index !== -1) {
+        this._queue.splice(index, 1);
+      }
+      const message = 'Connection operation buffering timed out after ' + bufferTimeoutMS + 'ms';
+      throw new MongooseError(message);
+    } else if (timeout != null) {
+      // Not strictly necessary, but avoid the extra overhead of creating a new MongooseError
+      // in case of success
+      clearTimeout(timeout);
+    }
   }
 };
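A sketch of the buffering behavior `_waitForConnect()` now implements. Assuming nothing is listening on the target port so the connection never leaves the `connecting` state, a buffered connection helper rejects once `bufferTimeoutMS` elapses rather than hanging:

```javascript
const mongoose = require('mongoose');

async function main() {
  const conn = mongoose.createConnection('mongodb://127.0.0.1:9999/test', {
    bufferTimeoutMS: 500
  });

  // Rejects after ~500ms with
  // "Connection operation buffering timed out after 500ms"
  await conn.listCollections().catch(err => console.log(err.message));
}

main();
```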
+
+/*!
+ * Get the default buffer timeout for this connection
+ */
+
+Connection.prototype._getBufferTimeoutMS = function _getBufferTimeoutMS() {
+  if (this.config.bufferTimeoutMS != null) {
+    return this.config.bufferTimeoutMS;
+  }
+  if (this.base != null && this.base.get('bufferTimeoutMS') != null) {
+    return this.base.get('bufferTimeoutMS');
+  }
+  return 10000;
+};
+
 /**
  * Helper for MongoDB Node driver's `listCollections()`.
  * Returns an array of collection objects.
@@ -1025,7 +1080,7 @@ Connection.prototype.openUri = async function openUri(uri, options) {
 
   for (const model of Object.values(this.models)) {
     // Errors handled internally, so safe to ignore error
-    model.init().catch(function $modelInitNoop() {});
+    model.init().catch(function $modelInitNoop() { });
   }
 
   // `createConnection()` calls this `openUri()` function without
@@ -1061,7 +1116,7 @@
 // to avoid uncaught exceptions when using `on('error')`. See gh-14377.
 Connection.prototype.on = function on(event, callback) {
   if (event === 'error' && this.$initialConnection) {
-    this.$initialConnection.catch(() => {});
+    this.$initialConnection.catch(() => { });
   }
   return EventEmitter.prototype.on.call(this, event, callback);
 };
@@ -1083,7 +1138,7 @@
 // to avoid uncaught exceptions when using `on('error')`. See gh-14377.
 Connection.prototype.once = function on(event, callback) {
   if (event === 'error' && this.$initialConnection) {
-    this.$initialConnection.catch(() => {});
+    this.$initialConnection.catch(() => { });
   }
   return EventEmitter.prototype.once.call(this, event, callback);
 };
@@ -1156,6 +1211,10 @@ Connection.prototype.close = async function close(force) {
     this.$wasForceClosed = !!force;
   }
 
+  if (this._lastHeartbeatAt != null) {
+    this._lastHeartbeatAt = null;
+  }
+
   for (const model of Object.values(this.models)) {
     // If manually disconnecting, make sure to clear each model's `$init`
     // promise, so Mongoose knows to re-run `init()` in case the
@@ -1412,7 +1471,7 @@ Connection.prototype.model = function model(name, schema, collection, options) {
     }
 
     // Errors handled internally, so safe to ignore error
-    model.init().catch(function $modelInitNoop() {});
+    model.init().catch(function $modelInitNoop() { });
 
     return model;
   }
@@ -1439,7 +1498,7 @@ Connection.prototype.model = function model(name, schema, collection, options) {
   }
 
   if (this === model.prototype.db
-      && (!collection || collection === model.collection.name)) {
+    && (!collection || collection === model.collection.name)) {
     // model already uses this connection.
     // only the first model with this name is cached to allow
@@ -1626,8 +1685,8 @@ Connection.prototype.authMechanismDoesNotRequirePassword = function authMechanismDoesNotRequirePassword() {
  */
 Connection.prototype.optionsProvideAuthenticationData = function optionsProvideAuthenticationData(options) {
   return (options) &&
-      (options.user) &&
-      ((options.pass) || this.authMechanismDoesNotRequirePassword());
+    (options.user) &&
+    ((options.pass) || this.authMechanismDoesNotRequirePassword());
 };
 
 /**
@@ -1689,7 +1748,7 @@
  */
 Connection.prototype.syncIndexes = async function syncIndexes(options = {}) {
   const result = {};
-  const errorsMap = { };
+  const errorsMap = {};
   const { continueOnError } = options;
   delete options.continueOnError;
diff --git a/lib/cursor/aggregationCursor.js b/lib/cursor/aggregationCursor.js
index a49ec426ed7..01cf961d5dd 100644
--- a/lib/cursor/aggregationCursor.js
+++ b/lib/cursor/aggregationCursor.js
@@ -8,6 +8,7 @@ const MongooseError = require('../error/mongooseError');
 const Readable = require('stream').Readable;
 const eachAsync = require('../helpers/cursor/eachAsync');
 const immediate = require('../helpers/immediate');
+const kareem = require('kareem');
 const util = require('util');
 
 /**
@@ -62,7 +63,11 @@ util.inherits(AggregationCursor, Readable);
 
 function _init(model, c, agg) {
   if (!model.collection.buffer) {
-    model.hooks.execPre('aggregate', agg, function() {
+    model.hooks.execPre('aggregate', agg, function(err) {
+      if (err != null) {
+        _handlePreHookError(c, err);
+        return;
+      }
       if (typeof agg.options?.cursor?.transform === 'function') {
         c._transforms.push(agg.options.cursor.transform);
       }
@@ -72,7 +77,12 @@ function _init(model, c, agg) {
     });
   } else {
     model.collection.emitter.once('queue', function() {
-      model.hooks.execPre('aggregate', agg, function() {
+      model.hooks.execPre('aggregate', agg, function(err) {
+        if (err != null) {
+          _handlePreHookError(c, err);
+          return;
+        }
+
         if (typeof agg.options?.cursor?.transform === 'function') {
           c._transforms.push(agg.options.cursor.transform);
         }
@@ -84,6 +94,38 @@ function _init(model, c, agg) {
   }
 }
 
+/**
+* Handles error emitted from pre middleware. In particular, checks for `skipWrappedFunction`, which allows skipping
+* the actual aggregation and overwriting the function's return value. Because aggregation cursors don't return a value,
+* we need to make sure the user doesn't accidentally set a value in skipWrappedFunction.
+*
+* @param {QueryCursor} queryCursor
+* @param {Error} err
+* @returns
+*/
+
+function _handlePreHookError(queryCursor, err) {
+  if (err instanceof kareem.skipWrappedFunction) {
+    const resultValue = err.args[0];
+    if (resultValue != null && (!Array.isArray(resultValue) || resultValue.length)) {
+      const err = new MongooseError(
+        'Cannot `skipMiddlewareFunction()` with a value when using ' +
+        '`.aggregate().cursor()`, value must be nullish or empty array, got "' +
+        util.inspect(resultValue) +
+        '".'
+      );
+      queryCursor._markError(err);
+      queryCursor.listeners('error').length > 0 && queryCursor.emit('error', err);
+      return;
+    }
+    queryCursor.emit('cursor', null);
+    return;
+  }
+  queryCursor._markError(err);
+  queryCursor.listeners('error').length > 0 && queryCursor.emit('error', err);
+}
+
+
 /**
 * Necessary to satisfy the Readable API
 * @method _read
@@ -424,6 +466,7 @@ function _next(ctx, cb) {
       err => callback(err)
     );
   } else {
+    ctx.once('error', cb);
     ctx.once('cursor', function() {
       _next(ctx, cb);
     });
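With the cursor changes above, errors thrown in `pre('aggregate')` middleware now surface to the cursor consumer instead of leaving the stream hanging. A sketch, assuming an already-connected `mongoose` instance and a hypothetical `Test` model:

```javascript
const schema = new mongoose.Schema({ name: String });
schema.pre('aggregate', function() {
  throw new Error('aggregation blocked by hook');
});
const TestModel = mongoose.model('Test', schema);

// The hook error now propagates through the cursor's 'error' path.
const cursor = TestModel.aggregate([{ $match: {} }]).cursor();
cursor.next().catch(err => console.log(err.message)); // "aggregation blocked by hook"
```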
diff --git a/lib/document.js b/lib/document.js
index 55b916893ea..e43c0e67157 100644
--- a/lib/document.js
+++ b/lib/document.js
@@ -741,15 +741,10 @@ function init(self, obj, doc, opts, prefix) {
   let schemaType;
   let path;
   let i;
-  let index = 0;
   const strict = self.$__.strictMode;
   const docSchema = self.$__schema;
 
-  while (index < len) {
-    _init(index++);
-  }
-
-  function _init(index) {
+  for (let index = 0; index < len; ++index) {
     i = keys[index];
     // avoid prototype pollution
     if (i === '__proto__' || i === 'constructor') {
@@ -1147,7 +1142,7 @@ Document.prototype.$set = function $set(path, val, type, options) {
       } else if (pathtype === 'nested' && valForKey == null) {
         this.$set(pathName, valForKey, constructing, options);
       }
-    } else if (valForKey !== void 0) {
+    } else {
       this.$set(pathName, valForKey, constructing, options);
     }
   }
@@ -3558,8 +3553,10 @@ Document.prototype.$__undoReset = function $__undoReset() {
     }
   }
 
-  for (const subdoc of this.$getAllSubdocs()) {
-    subdoc.$__undoReset();
+  if (!this.$isSubdocument) {
+    for (const subdoc of this.$getAllSubdocs()) {
+      subdoc.$__undoReset();
+    }
   }
 };
@@ -3839,15 +3836,39 @@ Document.prototype.$toObject = function(options, json) {
   // Parent options should only bubble down for subdocuments, not populated docs
   options._parentOptions = this.$isSubdocument ? options : null;
 
-  // remember the root transform function
-  // to save it from being overwritten by sub-transform functions
-  // const originalTransform = options.transform;
+  const schemaFieldsOnly = options._calledWithOptions.schemaFieldsOnly
+    ?? options.schemaFieldsOnly
+    ?? defaultOptions.schemaFieldsOnly
+    ?? false;
 
   let ret;
   if (hasOnlyPrimitiveValues && !options.flattenObjectIds) {
     // Fast path: if we don't have any nested objects or arrays, we only need a
     // shallow clone.
-    ret = this.$__toObjectShallow();
+    ret = this.$__toObjectShallow(schemaFieldsOnly);
+  } else if (schemaFieldsOnly) {
+    ret = {};
+    for (const path of Object.keys(this.$__schema.paths)) {
+      const value = this.$__getValue(path);
+      if (value === undefined) {
+        continue;
+      }
+      let pathToSet = path;
+      let objToSet = ret;
+      if (path.indexOf('.') !== -1) {
+        const segments = path.split('.');
+        pathToSet = segments[segments.length - 1];
+        for (let i = 0; i < segments.length - 1; ++i) {
+          objToSet[segments[i]] = objToSet[segments[i]] ?? {};
+          objToSet = objToSet[segments[i]];
+        }
+      }
+      if (value === null) {
+        objToSet[pathToSet] = null;
+        continue;
+      }
+      objToSet[pathToSet] = clone(value, options);
+    }
   } else {
     ret = clone(this._doc, options) || {};
   }
@@ -3913,10 +3934,12 @@ Document.prototype.$toObject = function(options, json) {
 * Internal shallow clone alternative to `$toObject()`: much faster, no options processing
 */
 
-Document.prototype.$__toObjectShallow = function $__toObjectShallow() {
+Document.prototype.$__toObjectShallow = function $__toObjectShallow(schemaFieldsOnly) {
   const ret = {};
   if (this._doc != null) {
-    for (const key of Object.keys(this._doc)) {
+    const keys = schemaFieldsOnly ? Object.keys(this.$__schema.paths) : Object.keys(this._doc);
+    for (const key of keys) {
+      // Safe to do this even in the schemaFieldsOnly case because we assume there's no nested paths
      const value = this._doc[key];
      if (value instanceof Date) {
        ret[key] = new Date(value);
@@ -4069,7 +4092,8 @@ Document.prototype.$__toObjectShallow = function $__toObjectShallow() {
 * @param {Boolean} [options.flattenMaps=false] if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`.
 * @param {Boolean} [options.flattenObjectIds=false] if true, convert any ObjectIds in the result to 24 character hex strings.
 * @param {Boolean} [options.useProjection=false] - If true, omits fields that are excluded in this document's projection. Unless you specified a projection, this will omit any field that has `select: false` in the schema.
- * @return {Object} js object (not a POJO)
+ * @param {Boolean} [options.schemaFieldsOnly=false] - If true, the resulting object will only have fields that are defined in the document's schema. By default, `toObject()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema.
+ * @return {Object} document as a plain old JavaScript object (POJO). This object may contain ObjectIds, Maps, Dates, mongodb.Binary, Buffers, and other non-POJO values.
 * @see mongodb.Binary https://mongodb.github.io/node-mongodb-native/4.9/classes/Binary.html
 * @api public
 * @memberOf Document
@@ -4339,6 +4363,7 @@ function omitDeselectedFields(self, json) {
 * @param {Object} options
 * @param {Boolean} [options.flattenMaps=true] if true, convert Maps to [POJOs](https://masteringjs.io/tutorials/fundamentals/pojo). Useful if you want to `JSON.stringify()` the result.
 * @param {Boolean} [options.flattenObjectIds=false] if true, convert any ObjectIds in the result to 24 character hex strings.
+ * @param {Boolean} [options.schemaFieldsOnly=false] - If true, the resulting object will only have fields that are defined in the document's schema. By default, `toJSON()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema.
 * @return {Object}
 * @see Document#toObject https://mongoosejs.com/docs/api/document.html#Document.prototype.toObject()
 * @see JSON.stringify() in JavaScript https://thecodebarbarian.com/the-80-20-guide-to-json-stringify-in-javascript.html
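A sketch of the new `schemaFieldsOnly` option wired up above, assuming a hypothetical `TestModel` whose collection contains a legacy field that the schema no longer declares:

```javascript
const doc = await TestModel.findOne().orFail();

// Default: toObject() returns everything stored in the raw MongoDB document.
doc.toObject().legacyField; // present if stored in MongoDB

// With schemaFieldsOnly, only paths declared in the schema survive.
doc.toObject({ schemaFieldsOnly: true }).legacyField; // undefined
```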
@@ -4509,6 +4534,8 @@ Document.prototype.equals = function(doc) {
 * @param {Object|Function} [options.match=null] Add an additional filter to the populate query. Can be a filter object containing [MongoDB query syntax](https://www.mongodb.com/docs/manual/tutorial/query-documents/), or a function that returns a filter object.
 * @param {Function} [options.transform=null] Function that Mongoose will call on every populated document that allows you to transform the populated document.
 * @param {Object} [options.options=null] Additional options like `limit` and `lean`.
+ * @param {Boolean} [options.forceRepopulate=true] Set to `false` to prevent Mongoose from repopulating paths that are already populated
+ * @param {Boolean} [options.ordered=false] Set to `true` to execute any populate queries one at a time, as opposed to in parallel. We recommend setting this option to `true` if using transactions, especially if also populating multiple paths or paths with multiple models. MongoDB server does **not** support multiple operations in parallel on a single transaction.
 * @param {Function} [callback] Callback
 * @see population https://mongoosejs.com/docs/populate.html
 * @see Query#select https://mongoosejs.com/docs/api/query.html#Query.prototype.select()
@@ -4535,6 +4562,7 @@ Document.prototype.populate = async function populate() {
   }
 
   const paths = utils.object.vals(pop);
+
   let topLevelModel = this.constructor;
   if (this.$__isNested) {
     topLevelModel = this.$__[scopeSymbol].constructor;
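A sketch of the `ordered` populate option documented above, assuming hypothetical `Story` documents with `author` and `fans` refs; serial execution matters inside transactions because MongoDB does not allow parallel operations on one session:

```javascript
await mongoose.connection.transaction(async session => {
  const story = await Story.findOne().session(session).orFail();
  // Run the populate queries one at a time rather than in parallel.
  await story.populate([
    { path: 'author', ordered: true },
    { path: 'fans', ordered: true }
  ]);
});
```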
diff --git a/lib/drivers/browser/index.js b/lib/drivers/browser/index.js
index 681ba9519dc..2c77c712dde 100644
--- a/lib/drivers/browser/index.js
+++ b/lib/drivers/browser/index.js
@@ -10,3 +10,4 @@ exports.Collection = function() {
 exports.Connection = function() {
   throw new Error('Cannot create a connection from browser library');
 };
+exports.BulkWriteResult = function() {};
diff --git a/lib/drivers/node-mongodb-native/bulkWriteResult.js b/lib/drivers/node-mongodb-native/bulkWriteResult.js
new file mode 100644
index 00000000000..e3c79b7e381
--- /dev/null
+++ b/lib/drivers/node-mongodb-native/bulkWriteResult.js
@@ -0,0 +1,5 @@
+'use strict';
+
+const BulkWriteResult = require('mongodb/lib/bulk/common').BulkWriteResult;
+
+module.exports = BulkWriteResult;
diff --git a/lib/drivers/node-mongodb-native/collection.js b/lib/drivers/node-mongodb-native/collection.js
index d2150c6ad57..1e4678ff93f 100644
--- a/lib/drivers/node-mongodb-native/collection.js
+++ b/lib/drivers/node-mongodb-native/collection.js
@@ -13,6 +13,8 @@ const internalToObjectOptions = require('../../options').internalToObjectOptions;
 const stream = require('stream');
 const util = require('util');
 
+const formatToObjectOptions = Object.freeze({ ...internalToObjectOptions, copyTrustedSymbol: false });
+
 /**
 * A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) collection implementation.
 *
@@ -384,7 +386,9 @@ function format(obj, sub, color, shell) {
   }
 
   const clone = require('../../helpers/clone');
-  let x = clone(obj, internalToObjectOptions);
+  // `sub` indicates `format()` was called recursively, so skip cloning because we already
+  // did a deep clone on the top-level object.
+  let x = sub ? obj : clone(obj, formatToObjectOptions);
 
   const constructorName = getConstructorName(x);
   if (constructorName === 'Binary') {
diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js
index 0659ac4e647..fa561c2560c 100644
--- a/lib/drivers/node-mongodb-native/connection.js
+++ b/lib/drivers/node-mongodb-native/connection.js
@@ -12,6 +12,7 @@ const pkg = require('../../../package.json');
 const processConnectionOptions = require('../../helpers/processConnectionOptions');
 const setTimeout = require('../../helpers/timers').setTimeout;
 const utils = require('../../utils');
+const { Schema } = require('../../mongoose');
 
 /**
 * A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation.
@@ -281,6 +282,11 @@ NativeConnection.prototype.createClient = async function createClient(uri, options) {
     delete options.autoSearchIndex;
   }
 
+  if ('bufferTimeoutMS' in options) {
+    this.config.bufferTimeoutMS = options.bufferTimeoutMS;
+    delete options.bufferTimeoutMS;
+  }
+
   // Backwards compat
   if (options.user || options.pass) {
     options.auth = options.auth || {};
@@ -315,6 +321,16 @@ NativeConnection.prototype.createClient = async function createClient(uri, options) {
     };
   }
 
+  const { schemaMap, encryptedFieldsMap } = this._buildEncryptionSchemas();
+
+  if (Object.keys(schemaMap).length > 0) {
+    options.autoEncryption.schemaMap = schemaMap;
+  }
+
+  if (Object.keys(encryptedFieldsMap).length > 0) {
+    options.autoEncryption.encryptedFieldsMap = encryptedFieldsMap;
+  }
+
   this.readyState = STATES.connecting;
   this._connectionString = uri;
@@ -338,6 +354,55 @@ NativeConnection.prototype.createClient = async function createClient(uri, options) {
 
   return this;
 };
 
+/**
+ * Given a connection, which may or may not have encrypted models, build
+ * a schemaMap and/or an encryptedFieldsMap for the connection, combining all models
+ * into a single schemaMap and encryptedFields map.
+ *
+ * @returns a copy of the options object with a schemaMap and/or an encryptedFieldsMap added to the options' autoEncryption
+ * options.
+ */
+NativeConnection.prototype._buildEncryptionSchemas = function() {
+  const qeMappings = {};
+  const csfleMappings = {};
+
+  // If discriminators are configured for the collection, there might be multiple models
+  // pointing to the same namespace. For this scenario, we merge all the schemas for each namespace
+  // into a single schema.
+  // Notably, this doesn't allow for discriminators to declare multiple values on the same fields.
+  for (const model of Object.values(this.models)) {
+    const { schema, collection: { collectionName } } = model;
+    const namespace = `${this.$dbName}.${collectionName}`;
+    if (schema.encryptionType() === 'csfle') {
+      csfleMappings[namespace] ??= new Schema({}, { encryptionType: 'csfle' });
+      csfleMappings[namespace].add(schema);
+    } else if (schema.encryptionType() === 'queryableEncryption') {
+      qeMappings[namespace] ??= new Schema({}, { encryptionType: 'queryableEncryption' });
+      qeMappings[namespace].add(schema);
+    }
+  }
+
+  const schemaMap = Object.entries(csfleMappings).reduce(
+    (schemaMap, [namespace, schema]) => {
+      schemaMap[namespace] = schema._buildSchemaMap();
+      return schemaMap;
+    },
+    {}
+  );
+
+  const encryptedFieldsMap = Object.entries(qeMappings).reduce(
+    (encryptedFieldsMap, [namespace, schema]) => {
+      encryptedFieldsMap[namespace] = schema._buildEncryptedFields();
+      return encryptedFieldsMap;
+    },
+    {}
+  );
+
+  return {
+    schemaMap, encryptedFieldsMap
+  };
+};
+
 /*!
  * ignore
  */
 
@@ -358,7 +423,7 @@ NativeConnection.prototype.setClient = function setClient(client) {
 
   for (const model of Object.values(this.models)) {
     // Errors handled internally, so safe to ignore error
-    model.init().catch(function $modelInitNoop() {});
+    model.init().catch(function $modelInitNoop() { });
   }
 
   return this;
@@ -401,9 +466,9 @@ function _setClient(conn, client, options, dbName) {
   };
 
   const type = client &&
-    client.topology &&
-    client.topology.description &&
-    client.topology.description.type || '';
+        client.topology &&
+        client.topology.description &&
+        client.topology.description.type || '';
 
   if (type === 'Single') {
     client.on('serverDescriptionChanged', ev => {
@@ -426,6 +491,9 @@ function _setClient(conn, client, options, dbName) {
       }
     });
   }
+
+  conn._lastHeartbeatAt = null;
+
   client.on('serverHeartbeatSucceeded', () => {
     conn._lastHeartbeatAt = Date.now();
   });
diff --git a/lib/drivers/node-mongodb-native/index.js b/lib/drivers/node-mongodb-native/index.js
index de82be5986b..38e2dd6aca7 100644
--- a/lib/drivers/node-mongodb-native/index.js
+++ b/lib/drivers/node-mongodb-native/index.js
@@ -4,5 +4,6 @@
 
 'use strict';
 
+exports.BulkWriteResult = require('./bulkWriteResult');
 exports.Collection = require('./collection');
 exports.Connection = require('./connection');
diff --git a/lib/encryptionUtils.js b/lib/encryptionUtils.js
new file mode 100644
index 00000000000..f0c46dee716
--- /dev/null
+++ b/lib/encryptionUtils.js
@@ -0,0 +1,72 @@
+'use strict';
+
+const schemaTypes = require('./schema/index.js');
+const SchemaBigInt = require('./schema/bigint');
+const SchemaBoolean = require('./schema/boolean');
+const SchemaBuffer = require('./schema/buffer');
+const SchemaDate = require('./schema/date');
+const SchemaDecimal128 = require('./schema/decimal128');
+const SchemaDouble = require('./schema/double');
+const SchemaInt32 = require('./schema/int32');
+const SchemaObjectId = require('./schema/objectId');
+const SchemaString = require('./schema/string');
+
+/**
+ * Given a schema and a path to a field in the schema, this returns the
+ * BSON type of the field, if it can be determined. This method specifically
+ * **only** handles BSON types that are used for CSFLE and QE - any other
+ * BSON types will return `null`. (example: MinKey and MaxKey).
+ *
+ * @param {import('.').Schema} schema
+ * @param {string} path
+ * @returns {string}
+ */
+function inferBSONType(schema, path) {
+  const type = schema.path(path);
+
+  if (type instanceof SchemaString) {
+    return 'string';
+  }
+
+  if (type instanceof SchemaInt32) {
+    return 'int';
+  }
+
+  if (type instanceof SchemaBigInt) {
+    return 'long';
+  }
+
+  if (type instanceof SchemaBoolean) {
+    return 'bool';
+  }
+
+  if (type instanceof SchemaDate) {
+    return 'date';
+  }
+
+  if (type instanceof SchemaBuffer) {
+    return 'binData';
+  }
+
+  if (type instanceof SchemaObjectId) {
+    return 'objectId';
+  }
+
+  if (type instanceof SchemaDecimal128) {
+    return 'decimal';
+  }
+
+  if (type instanceof SchemaDouble) {
+    return 'double';
+  }
+
+  if (type instanceof schemaTypes.Array) {
+    return 'array';
+  }
+
+  return null;
+}
+
+module.exports = {
+  inferBSONType
+};
diff --git a/lib/encryption_utils.js b/lib/encryption_utils.js
new file mode 100644
index 00000000000..1f17fa5032b
--- /dev/null
+++ b/lib/encryption_utils.js
@@ -0,0 +1,72 @@
+'use strict';
+
+const { Array } = require('./schema/index.js');
+const SchemaBigInt = require('./schema/bigint');
+const SchemaBoolean = require('./schema/boolean');
+const SchemaBuffer = require('./schema/buffer');
+const SchemaDate = require('./schema/date');
+const SchemaDecimal128 = require('./schema/decimal128');
+const SchemaDouble = require('./schema/double');
+const SchemaInt32 = require('./schema/int32');
+const SchemaObjectId = require('./schema/objectId');
+const SchemaString = require('./schema/string');
+
+/**
+ * Given a schema and a path to a field in the schema, this returns the
+ * BSON type of the field, if it can be determined. This method specifically
+ * **only** handles BSON types that are used for CSFLE and QE - any other
+ * BSON types will return `null`. (example: MinKey and MaxKey).
+ *
+ * @param {import('.').Schema} schema
+ * @param {string} path
+ * @returns
+ */
+function inferBSONType(schema, path) {
+  const type = schema.path(path);
+
+  if (type instanceof SchemaString) {
+    return 'string';
+  }
+
+  if (type instanceof SchemaInt32) {
+    return 'int';
+  }
+
+  if (type instanceof SchemaBigInt) {
+    return 'long';
+  }
+
+  if (type instanceof SchemaBoolean) {
+    return 'bool';
+  }
+
+  if (type instanceof SchemaDate) {
+    return 'date';
+  }
+
+  if (type instanceof SchemaBuffer) {
+    return 'binData';
+  }
+
+  if (type instanceof SchemaObjectId) {
+    return 'objectId';
+  }
+
+  if (type instanceof SchemaDecimal128) {
+    return 'decimal';
+  }
+
+  if (type instanceof SchemaDouble) {
+    return 'double';
+  }
+
+  if (type instanceof Array) {
+    return 'array';
+  }
+
+  return null;
+}
+
+module.exports = {
+  inferBSONType
+};
diff --git a/lib/helpers/clone.js b/lib/helpers/clone.js
index a8dd587dbf9..a19e7a6238f 100644
--- a/lib/helpers/clone.js
+++ b/lib/helpers/clone.js
@@ -147,7 +147,7 @@ function cloneObject(obj, options, isArrayChild) {
   } else if (seen) {
     seen.set(obj, ret);
   }
-  if (trustedSymbol in obj) {
+  if (trustedSymbol in obj && options?.copyTrustedSymbol !== false) {
     ret[trustedSymbol] = obj[trustedSymbol];
   }
 
diff --git a/lib/helpers/document/applyDefaults.js b/lib/helpers/document/applyDefaults.js
index 258e570ec30..7d1b5b02382 100644
--- a/lib/helpers/document/applyDefaults.js
+++ b/lib/helpers/document/applyDefaults.js
@@ -19,6 +19,9 @@ module.exports = function applyDefaults(doc, fields, exclude, hasIncludedChildren) {
     const type = doc.$__schema.paths[p];
     const path = type.splitPath();
     const len = path.length;
+    if (path[len - 1] === '$*') {
+      continue;
+    }
     let included = false;
     let doc_ = doc._doc;
     for (let j = 0; j < len; ++j) {
diff --git a/lib/helpers/getDefaultBulkwriteResult.js b/lib/helpers/getDefaultBulkwriteResult.js
index 855cf7a1bd8..52ed5bfe772 100644
--- a/lib/helpers/getDefaultBulkwriteResult.js
+++ b/lib/helpers/getDefaultBulkwriteResult.js
@@ -1,26 +1,17 @@
 'use strict';
+
 function getDefaultBulkwriteResult() {
   return {
-    result: {
-      ok: 1,
-      writeErrors: [],
-      writeConcernErrors: [],
-      insertedIds: [],
-      nInserted: 0,
-      nUpserted: 0,
-      nMatched: 0,
-      nModified: 0,
-      nRemoved: 0,
-      upserted: []
-    },
-    insertedCount: 0,
-    matchedCount: 0,
-    modifiedCount: 0,
-    deletedCount: 0,
-    upsertedCount: 0,
-    upsertedIds: {},
-    insertedIds: {},
-    n: 0
+    ok: 1,
+    nInserted: 0,
+    nUpserted: 0,
+    nMatched: 0,
+    nModified: 0,
+    nRemoved: 0,
+    upserted: [],
+    writeErrors: [],
+    insertedIds: [],
+    writeConcernErrors: []
   };
 }
diff --git a/lib/helpers/model/decorateBulkWriteResult.js b/lib/helpers/model/decorateBulkWriteResult.js
new file mode 100644
index 00000000000..6cc926358fb
--- /dev/null
+++ b/lib/helpers/model/decorateBulkWriteResult.js
@@ -0,0 +1,8 @@
+'use strict';
+
+module.exports = function decorateBulkWriteResult(resultOrError, validationErrors, results) {
+  resultOrError.mongoose = resultOrError.mongoose || {};
+  resultOrError.mongoose.validationErrors = validationErrors;
+  resultOrError.mongoose.results = results;
+  return resultOrError;
+};
diff --git a/lib/helpers/populate/getSchemaTypes.js b/lib/helpers/populate/getSchemaTypes.js
index 8cbbcafa8b1..8bf3285ab5e 100644
--- a/lib/helpers/populate/getSchemaTypes.js
+++ b/lib/helpers/populate/getSchemaTypes.js
@@ -178,8 +178,8 @@ module.exports = function getSchemaTypes(model, schema, doc, path) {
       }
 
       const fullPath = nestedPath.concat([trypath]).join('.');
-      if (topLevelDoc != null && topLevelDoc.$__ &&
-          topLevelDoc.$populated(fullPath) && p < parts.length) {
-        const model = doc.$__.populated[fullPath].options[populateModelSymbol];
+      if (topLevelDoc != null && topLevelDoc.$__ && topLevelDoc.$populated(fullPath, true) && p < parts.length) {
+        const model = topLevelDoc.$populated(fullPath, true).options[populateModelSymbol];
         if (model != null) {
           const ret = search(
             parts.slice(p),
diff --git a/lib/helpers/query/castUpdate.js b/lib/helpers/query/castUpdate.js
index 3cf30cb0e17..f0819b3a586 100644
--- a/lib/helpers/query/castUpdate.js
+++ b/lib/helpers/query/castUpdate.js
@@ -82,6 +82,16 @@ module.exports = function castUpdate(schema, obj, options, context, filter) {
     schema = schema.discriminators[discriminatorValue] ||
       (byValue && byValue.schema) ||
       schema;
+  } else if (schema != null &&
+    options.overwriteDiscriminatorKey &&
+    obj.$set != null &&
+    utils.hasUserDefinedProperty(obj.$set, schema.options.discriminatorKey) &&
+    schema.discriminators != null) {
+    const discriminatorValue = obj.$set[schema.options.discriminatorKey];
+    const byValue = getDiscriminatorByValue(context.model.discriminators, discriminatorValue);
+    schema = schema.discriminators[discriminatorValue] ||
+      (byValue && byValue.schema) ||
+      schema;
   }
 
   if (options.upsert) {
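A sketch of what the new `castUpdate()` branch above enables: with `overwriteDiscriminatorKey`, a discriminator key inside `$set` now makes Mongoose cast the rest of the update against that discriminator's schema. The `Event`/`Clicked` models here are hypothetical:

```javascript
const eventSchema = new mongoose.Schema({ time: Date }, { discriminatorKey: 'kind' });
const Event = mongoose.model('Event', eventSchema);
Event.discriminator('Clicked', new mongoose.Schema({ element: String }));

// `element` is cast using the Clicked discriminator's schema because
// `$set.kind` identifies the discriminator.
await Event.updateOne(
  { _id: new mongoose.Types.ObjectId() },
  { $set: { kind: 'Clicked', element: '#button' } },
  { overwriteDiscriminatorKey: true }
);
```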
diff --git a/lib/model.js b/lib/model.js
index 3635f93b9fd..7536609e4fa 100644
--- a/lib/model.js
+++ b/lib/model.js
@@ -10,7 +10,6 @@ const Document = require('./document');
 const DocumentNotFoundError = require('./error/notFound');
 const EventEmitter = require('events').EventEmitter;
 const Kareem = require('kareem');
-const { MongoBulkWriteError } = require('mongodb');
 const MongooseBulkWriteError = require('./error/bulkWriteError');
 const MongooseError = require('./error/index');
 const ObjectParameterError = require('./error/objectParameter');
@@ -64,12 +63,12 @@ const prepareDiscriminatorPipeline = require('./helpers/aggregate/prepareDiscriminatorPipeline');
 const pushNestedArrayPaths = require('./helpers/model/pushNestedArrayPaths');
 const removeDeselectedForeignField = require('./helpers/populate/removeDeselectedForeignField');
 const setDottedPath = require('./helpers/path/setDottedPath');
-const STATES = require('./connectionState');
 const util = require('util');
 const utils = require('./utils');
 const minimize = require('./helpers/minimize');
 const MongooseBulkSaveIncompleteError = require('./error/bulkSaveIncompleteError');
 const ObjectExpectedError = require('./error/objectExpected');
+const decorateBulkWriteResult = require('./helpers/model/decorateBulkWriteResult');
 
 const modelCollectionSymbol = Symbol('mongoose#Model#collection');
 const modelDbSymbol = Symbol('mongoose#Model#db');
@@ -1104,20 +1103,28 @@ Model.init = function init() {
     return results;
   };
   const _createCollection = async() => {
-    if ((conn.readyState === STATES.connecting || conn.readyState === STATES.disconnected) && conn._shouldBufferCommands()) {
-      await new Promise(resolve => {
-        conn._queue.push({ fn: resolve });
-      });
-    }
-    const autoCreate = utils.getOption(
+    let autoCreate = utils.getOption(
       'autoCreate',
       this.schema.options,
-      conn.config,
-      conn.base.options
+      conn.config
+      // No base.options here because we don't want to take the base value if the connection hasn't
+      // set it yet
     );
+    if (autoCreate == null) {
+      // `autoCreate` may later be set when the connection is opened, so wait for connect before checking
+      await conn._waitForConnect(true);
+      autoCreate = utils.getOption(
+        'autoCreate',
+        this.schema.options,
+        conn.config,
+        conn.base.options
+      );
+    }
+
     if (!autoCreate) {
       return;
     }
+
     return await this.createCollection();
   };
@@ -3097,11 +3104,9 @@ Model.$__insertMany = function(arr, options, callback) {
       const res = {
         acknowledged: true,
         insertedCount: 0,
-        insertedIds: {},
-        mongoose: {
-          validationErrors: validationErrors
-        }
+        insertedIds: {}
       };
+      decorateBulkWriteResult(res, validationErrors, validationErrors);
       return callback(null, res);
     }
     callback(null, []);
@@ -3154,10 +3159,7 @@ Model.$__insertMany = function(arr, options, callback) {
 
     // Decorate with mongoose validation errors in case of unordered,
     // because then still do `insertMany()`
-    res.mongoose = {
-      validationErrors: validationErrors,
-      results: results
-    };
+    decorateBulkWriteResult(res, validationErrors, results);
   }
   return callback(null, res);
 }
@@ -3191,10 +3193,7 @@ Model.$__insertMany = function(arr, options, callback) {
     if (error.writeErrors != null) {
       for (let i = 0; i < error.writeErrors.length; ++i) {
         const originalIndex = validDocIndexToOriginalIndex.get(error.writeErrors[i].index);
-        error.writeErrors[i] = {
-          ...error.writeErrors[i],
-          index: originalIndex
-        };
+        error.writeErrors[i] = { ...error.writeErrors[i], index: originalIndex };
         if (!ordered) {
           results[originalIndex] = error.writeErrors[i];
         }
@@ -3238,10 +3237,7 @@ Model.$__insertMany = function(arr, options, callback) {
     });
 
     if (rawResult && ordered === false) {
-      error.mongoose = {
-        validationErrors: validationErrors,
-        results: results
-      };
+      decorateBulkWriteResult(error, validationErrors, results);
     }
 
     callback(error, null);
@@ -3392,7 +3388,11 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
   const ordered = options.ordered == null ? true : options.ordered;
 
   if (ops.length === 0) {
-    return getDefaultBulkwriteResult();
+    const BulkWriteResult = this.base.driver.get().BulkWriteResult;
+    const bulkWriteResult = new BulkWriteResult(getDefaultBulkwriteResult(), false);
+    bulkWriteResult.n = 0;
+    decorateBulkWriteResult(bulkWriteResult, [], []);
+    return bulkWriteResult;
   }
 
   const validations = ops.map(op => castBulkWrite(this, op, options));
@@ -3463,7 +3463,11 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
         'bulkWrite'
       );
     }
-    return getDefaultBulkwriteResult();
+    const BulkWriteResult = this.base.driver.get().BulkWriteResult;
+    const bulkWriteResult = new BulkWriteResult(getDefaultBulkwriteResult(), false);
+    bulkWriteResult.result = getDefaultBulkwriteResult();
+    decorateBulkWriteResult(bulkWriteResult, validationErrors, results);
+    return bulkWriteResult;
   }
 
   let error;
@@ -3471,10 +3475,18 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
     then(res => ([res, null])).
     catch(error => ([null, error]));
 
+  const writeErrorsByIndex = {};
+  if (error?.writeErrors) {
+    for (const writeError of error.writeErrors) {
+      writeErrorsByIndex[writeError.err.index] = writeError;
+    }
+  }
+  for (let i = 0; i < validOpIndexes.length; ++i) {
+    results[validOpIndexes[i]] = writeErrorsByIndex[i] ?? null;
+  }
@@ -3471,10 +3475,18 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
     then(res => ([res, null])).
     catch(error => ([null, error]));

+  const writeErrorsByIndex = {};
+  if (error?.writeErrors) {
+    for (const writeError of error.writeErrors) {
+      writeErrorsByIndex[writeError.err.index] = writeError;
+    }
+  }
+  for (let i = 0; i < validOpIndexes.length; ++i) {
+    results[validOpIndexes[i]] = writeErrorsByIndex[i] ?? null;
+  }
   if (error) {
     if (validationErrors.length > 0) {
-      error.mongoose = error.mongoose || {};
-      error.mongoose.validationErrors = validationErrors;
+      decorateBulkWriteResult(error, validationErrors, results);
     }

     await new Promise((resolve, reject) => {
@@ -3488,9 +3500,6 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
     });
   }

-  for (let i = 0; i < validOpIndexes.length; ++i) {
-    results[validOpIndexes[i]] = null;
-  }
   if (validationErrors.length > 0) {
     if (options.throwOnValidationError) {
       throw new MongooseBulkWriteError(
@@ -3500,9 +3509,7 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
         'bulkWrite'
       );
     } else {
-      res.mongoose = res.mongoose || {};
-      res.mongoose.validationErrors = validationErrors;
-      res.mongoose.results = results;
+      decorateBulkWriteResult(res, validationErrors, results);
     }
   }
 }
@@ -3568,7 +3575,7 @@ Model.bulkSave = async function bulkSave(documents, options) {
     (err) => ({ bulkWriteResult: null, bulkWriteError: err })
   );
   // If not a MongoBulkWriteError, treat this as all documents failed to save.
-  if (bulkWriteError != null && !(bulkWriteError instanceof MongoBulkWriteError)) {
+  if (bulkWriteError != null && bulkWriteError.name !== 'MongoBulkWriteError') {
     throw bulkWriteError;
   }

@@ -3982,6 +3989,10 @@ Model.hydrate = function(obj, projection, options) {
  *     res.upsertedId; // null or an id containing a document that had to be upserted.
  *     res.upsertedCount; // Number indicating how many documents had to be upserted. Will either be 0 or 1.
  *
+ *     // Other supported syntaxes
+ *     await Person.find({ name: /Stark$/ }).updateMany({ isDeleted: true }); // Using chaining syntax
+ *     await Person.find().updateMany({ isDeleted: true }); // Set `isDeleted` on _all_ Person documents
+ *
  * This function triggers the following middleware.
  *
  * - `updateMany()`
@@ -4002,10 +4013,14 @@ Model.hydrate = function(obj, projection, options) {
  * @api public
  */

-Model.updateMany = function updateMany(conditions, doc, options) {
+Model.updateMany = function updateMany(conditions, update, options) {
   _checkContext(this, 'updateMany');

-  return _update(this, 'updateMany', conditions, doc, options);
+  if (update == null) {
+    throw new MongooseError('updateMany `update` parameter cannot be nullish');
+  }
+
+  return _update(this, 'updateMany', conditions, update, options);
 };

 /**
@@ -4022,6 +4037,10 @@ Model.updateMany = function updateMany(conditions, doc, options) {
  *     res.upsertedId; // null or an id containing a document that had to be upserted.
  *     res.upsertedCount; // Number indicating how many documents had to be upserted. Will either be 0 or 1.
  *
+ *     // Other supported syntaxes
+ *     await Person.findOne({ name: 'Jean-Luc Picard' }).updateOne({ ship: 'USS Enterprise' }); // Using chaining syntax
+ *     await Person.updateOne({ ship: 'USS Enterprise' }); // Updates first doc's `ship` property
+ *
  * This function triggers the following middleware.
  *
  * - `updateOne()`
@@ -4362,6 +4381,7 @@ Model.validate = async function validate(obj, pathsOrOptions, context) {
  * @param {Object} [options.options=null] Additional options like `limit` and `lean`.
  * @param {Function} [options.transform=null] Function that Mongoose will call on every populated document that allows you to transform the populated document.
  * @param {Boolean} [options.forceRepopulate=true] Set to `false` to prevent Mongoose from repopulating paths that are already populated
+ * @param {Boolean} [options.ordered=false] Set to `true` to execute any populate queries one at a time, as opposed to in parallel. Set this option to `true` if populating multiple paths or paths with multiple models in transactions.
  * @return {Promise}
  * @api public
  */
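A short sketch of the `ordered` option just documented (model and path names are hypothetical); per the implementation below, one `ordered` path switches the entire `populate()` call to serial execution:

    await Doc.populate(docs, [
      { path: 'refA' },
      { path: 'refB', ordered: true } // forces refA and refB to populate in series
    ]);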
@@ -4379,11 +4399,21 @@ Model.populate = async function populate(docs, paths) {
   }

   // each path has its own query options and must be executed separately
-  const promises = [];
-  for (const path of paths) {
-    promises.push(_populatePath(this, docs, path));
+  if (paths.find(p => p.ordered)) {
+    // Populate in series, primarily for transactions because MongoDB doesn't support multiple operations on
+    // one transaction in parallel.
+    // Note that if _any_ path has `ordered`, we make the top-level populate `ordered` as well.
+    for (const path of paths) {
+      await _populatePath(this, docs, path);
+    }
+  } else {
+    // By default, populate in parallel
+    const promises = [];
+    for (const path of paths) {
+      promises.push(_populatePath(this, docs, path));
+    }
+    await Promise.all(promises);
   }
-  await Promise.all(promises);

   return docs;
 };
@@ -4503,12 +4533,22 @@ async function _populatePath(model, docs, populateOptions) {
     return;
   }

-  const promises = [];
-  for (const arr of params) {
-    promises.push(_execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); }));
+  if (populateOptions.ordered) {
+    // Populate in series, primarily for transactions because MongoDB doesn't support multiple operations on
+    // one transaction in parallel.
+    for (const arr of params) {
+      await _execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); });
+    }
+  } else {
+    // By default, populate in parallel
+    const promises = [];
+    for (const arr of params) {
+      promises.push(_execPopulateQuery.apply(null, arr).then(valsFromDb => { vals = vals.concat(valsFromDb); }));
+    }
+
+    await Promise.all(promises);
   }
-  await Promise.all(promises);

   for (const arr of params) {
     const mod = arr[0];
diff --git a/lib/query.js b/lib/query.js
index 067a6e020ed..af8efb2f078 100644
--- a/lib/query.js
+++ b/lib/query.js
@@ -65,6 +65,25 @@ const queryOptionMethods = new Set([
   'wtimeout'
 ]);

+// Map from operation name to the name of the function that executes the actual operation against MongoDB.
+// Called a thunk for legacy reasons, "thunk" means function that takes exactly 1 param, a callback.
+// Currently `_countDocuments()`, etc. are async functions that take no params.
+const opToThunk = new Map([
+  ['countDocuments', '_countDocuments'],
+  ['distinct', '__distinct'],
+  ['estimatedDocumentCount', '_estimatedDocumentCount'],
+  ['find', '_find'],
+  ['findOne', '_findOne'],
+  ['findOneAndReplace', '_findOneAndReplace'],
+  ['findOneAndUpdate', '_findOneAndUpdate'],
+  ['replaceOne', '_replaceOne'],
+  ['updateMany', '_updateMany'],
+  ['updateOne', '_updateOne'],
+  ['deleteMany', '_deleteMany'],
+  ['deleteOne', '_deleteOne'],
+  ['findOneAndDelete', '_findOneAndDelete']
+]);
+
 /**
  * Query constructor used for building queries. You do not need
  * to instantiate a `Query` directly. Instead use Model functions like
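This `opToThunk` map becomes the single dispatch table for `Query.prototype.exec()` (see the exec() hunk further down): the old `'_' + this.op` string concatenation and the `distinct` special case go away, and an unknown op now rejects instead of silently returning. A rough sketch of the observable change:

    const q = Person.find(); // hypothetical model
    q.op = 'findAll'; // not a key in opToThunk
    await q.exec(); // rejects with: Query has invalid `op`: "findAll"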
@@ -2337,18 +2356,17 @@ Query.prototype._find = async function _find() {
   }

   const mongooseOptions = this._mongooseOptions;
-  const _this = this;
-  const userProvidedFields = _this._userProvidedFields || {};
+  const userProvidedFields = this._userProvidedFields || {};

   applyGlobalMaxTimeMS(this.options, this.model.db.options, this.model.base.options);
   applyGlobalDiskUse(this.options, this.model.db.options, this.model.base.options);

   // Separate options to pass down to `completeMany()` in case we need to
   // set a session on the document
-  const completeManyOptions = Object.assign({}, {
+  const completeManyOptions = {
     session: this && this.options && this.options.session || null,
     lean: mongooseOptions.lean || null
-  });
+  };

   const options = this._optionsForExec();

@@ -2366,7 +2384,7 @@ Query.prototype._find = async function _find() {
   }

   if (!mongooseOptions.populate) {
-    const versionKey = _this.schema.options.versionKey;
+    const versionKey = this.schema.options.versionKey;
     if (mongooseOptions.lean && mongooseOptions.lean.versionKey === false && versionKey) {
       docs.forEach((doc) => {
         if (versionKey in doc) {
@@ -2375,17 +2393,17 @@ Query.prototype._find = async function _find() {
       });
     }
     return mongooseOptions.lean ?
-      _completeManyLean(_this.model.schema, docs, null, completeManyOptions) :
-      _this._completeMany(docs, fields, userProvidedFields, completeManyOptions);
+      _completeManyLean(this.model.schema, docs, null, completeManyOptions) :
+      this._completeMany(docs, fields, userProvidedFields, completeManyOptions);
   }
-  const pop = helpers.preparePopulationOptionsMQ(_this, mongooseOptions);
+  const pop = helpers.preparePopulationOptionsMQ(this, mongooseOptions);

   if (mongooseOptions.lean) {
-    return _this.model.populate(docs, pop);
+    return this.model.populate(docs, pop);
   }

-  docs = await _this._completeMany(docs, fields, userProvidedFields, completeManyOptions);
+  docs = await this._completeMany(docs, fields, userProvidedFields, completeManyOptions);
   await this.model.populate(docs, pop);

   return docs;
@@ -3992,6 +4010,10 @@ Query.prototype._replaceOne = async function _replaceOne() {
  *     res.n; // Number of documents matched
  *     res.nModified; // Number of documents modified
  *
+ *     // Other supported syntaxes
+ *     await Person.find({ name: /Stark$/ }).updateMany({ isDeleted: true }); // Using chaining syntax
+ *     await Person.find().updateMany({ isDeleted: true }); // Set `isDeleted` on _all_ Person documents
+ *
  * This function triggers the following middleware.
  *
  * - `updateMany()`
@@ -4062,6 +4084,10 @@ Query.prototype.updateMany = function(conditions, doc, options, callback) {
  *     res.upsertedCount; // Number of documents that were upserted
  *     res.upsertedId; // Identifier of the inserted document (if an upsert took place)
  *
+ *     // Other supported syntaxes
+ *     await Person.findOne({ name: 'Jean-Luc Picard' }).updateOne({ ship: 'USS Enterprise' }); // Using chaining syntax
+ *     await Person.updateOne({ ship: 'USS Enterprise' }); // Updates first doc's `ship` property
+ *
  * This function triggers the following middleware.
  *
  * - `updateOne()`
@@ -4389,22 +4415,14 @@ Query.prototype.exec = async function exec(op) {
   if (this.model == null) {
     throw new MongooseError('Query must have an associated model before executing');
   }
-
   this._validateOp();
-  if (!this.op) {
-    return;
+  const thunk = opToThunk.get(this.op);
+  if (!thunk) {
+    throw new MongooseError('Query has invalid `op`: "' + this.op + '"');
   }

-  if (this.options && this.options.sort) {
-    const keys = Object.keys(this.options.sort);
-    if (keys.includes('')) {
-      throw new Error('Invalid field "" passed to sort()');
-    }
-  }
-
-  let thunk = '_' + this.op;
-  if (this.op === 'distinct') {
-    thunk = '__distinct';
+  if (this.options && this.options.sort && typeof this.options.sort === 'object' && this.options.sort.hasOwnProperty('')) {
+    throw new Error('Invalid field "" passed to sort()');
   }

   if (this._executionStack != null) {
diff --git a/lib/schema.js b/lib/schema.js
index 0204c6cc9c4..691e187f223 100644
--- a/lib/schema.js
+++ b/lib/schema.js
@@ -25,6 +25,7 @@ const setPopulatedVirtualValue = require('./helpers/populate/setPopulatedVirtual
 const setupTimestamps = require('./helpers/timestamps/setupTimestamps');
 const utils = require('./utils');
 const validateRef = require('./helpers/populate/validateRef');
+const { inferBSONType } = require('./encryptionUtils');

 const hasNumericSubpathRegex = /\.\d+(\.|$)/;

@@ -86,6 +87,7 @@ const numberRE = /^\d+$/;
  * - [pluginTags](https://mongoosejs.com/docs/guide.html#pluginTags): array of strings - defaults to `undefined`. If set and plugin called with `tags` option, will only apply that plugin to schemas with a matching tag.
  * - [virtuals](https://mongoosejs.com/docs/tutorials/virtuals.html#virtuals-via-schema-options): object - virtuals to define, alias for [`.virtual`](https://mongoosejs.com/docs/api/schema.html#Schema.prototype.virtual())
  * - [collectionOptions]: object with options passed to [`createCollection()`](https://www.mongodb.com/docs/manual/reference/method/db.createCollection/) when calling `Model.createCollection()` or `autoCreate` set to true.
+ * - [encryptionType]: the encryption type for the schema. Valid options are `csfle` or `queryableEncryption`. See https://mongoosejs.com/docs/field-level-encryption.
  *
  * #### Options for Nested Schemas:
  *
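A minimal sketch of the new option (assuming `keyId` is an existing data-key UUID; the algorithm string matches the one used in the tests at the end of this diff):

    const { UUID } = require('bson');

    const keyId = new UUID(); // in practice, the id of a data key you already created
    const patientSchema = new Schema({
      ssn: {
        type: String,
        encrypt: { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' }
      }
    }, { encryptionType: 'csfle' });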
@@ -128,6 +130,7 @@ function Schema(obj, options) {
   // For internal debugging. Do not use this to try to save a schema in MDB.
   this.$id = ++id;
   this.mapPaths = [];
+  this.encryptedFields = {};

   this.s = {
     hooks: new Kareem()
@@ -166,7 +169,7 @@ function Schema(obj, options) {
   // ensure the documents get an auto _id unless disabled
   const auto_id = !this.paths['_id'] &&
-      (this.options._id) && !_idSubDoc;
+    (this.options._id) && !_idSubDoc;

   if (auto_id) {
     addAutoId(this);
@@ -463,6 +466,8 @@ Schema.prototype._clone = function _clone(Constructor) {

   s.aliases = Object.assign({}, this.aliases);

+  s.encryptedFields = clone(this.encryptedFields);
+
   return s;
 };

@@ -495,7 +500,17 @@ Schema.prototype.pick = function(paths, options) {
   }

   for (const path of paths) {
-    if (this.nested[path]) {
+    if (path in this.encryptedFields) {
+      const encrypt = this.encryptedFields[path];
+      const schemaType = this.path(path);
+      newSchema.add({
+        [path]: {
+          encrypt,
+          [this.options.typeKey]: schemaType
+        }
+      });
+    }
+    else if (this.nested[path]) {
       newSchema.add({ [path]: get(this.tree, path) });
     } else {
       const schematype = this.path(path);
@@ -506,6 +521,10 @@ Schema.prototype.pick = function(paths, options) {
     }
   }

+  if (!this._hasEncryptedFields()) {
+    newSchema.options.encryptionType = null;
+  }
+
   return newSchema;
 };

@@ -534,9 +553,9 @@ Schema.prototype.omit = function(paths, options) {
   if (!Array.isArray(paths)) {
     throw new MongooseError(
       'Schema#omit() only accepts an array argument, ' +
-        'got "' +
-        typeof paths +
-        '"'
+      'got "' +
+      typeof paths +
+      '"'
     );
   }

@@ -667,6 +686,20 @@ Schema.prototype._defaultToObjectOptions = function(json) {
   return defaultOptions;
 };

+/**
+ * Sets the encryption type of the schema, if a value is provided, otherwise
+ * returns the encryption type.
+ *
+ * @param {'csfle' | 'queryableEncryption' | undefined} encryptionType plain object with paths to add, or another schema
+ */
+Schema.prototype.encryptionType = function encryptionType(encryptionType) {
+  if (typeof encryptionType === 'string' || encryptionType === null) {
+    this.options.encryptionType = encryptionType;
+  } else {
+    return this.options.encryptionType;
+  }
+};
+
 /**
  * Adds key path / schema type pairs to this schema.
  *
@@ -735,7 +768,7 @@ Schema.prototype.add = function add(obj, prefix) {
     if (
       key !== '_id' &&
       ((typeof val !== 'object' && typeof val !== 'function' && !isMongooseTypeString) ||
-          val == null)
+        val == null)
     ) {
       throw new TypeError(`Invalid schema configuration: \`${val}\` is not ` +
         `a valid type at path \`${key}\`. See ` +
@@ -818,15 +851,128 @@ Schema.prototype.add = function add(obj, prefix) {
         }
       }
     }
+
+    if (val.instanceOfSchema && val.encryptionType() != null) {
+      // schema.add({ field: })
+      if (this.encryptionType() != val.encryptionType()) {
+        throw new Error('encryptionType of a nested schema must match the encryption type of the parent schema.');
+      }
+
+      for (const [encryptedField, encryptedFieldConfig] of Object.entries(val.encryptedFields)) {
+        const path = fullPath + '.' + encryptedField;
+        this._addEncryptedField(path, encryptedFieldConfig);
+      }
+    }
+    else if (typeof val === 'object' && 'encrypt' in val) {
+      // schema.add({ field: { type: , encrypt: { ... }}})
+      const { encrypt } = val;
+
+      if (this.encryptionType() == null) {
+        throw new Error('encryptionType must be provided');
+      }
+
+      this._addEncryptedField(fullPath, encrypt);
+    } else {
+      // if the field was already encrypted and we re-configure it to be unencrypted, remove
+      // the encrypted field configuration
+      this._removeEncryptedField(fullPath);
+    }
   }

   const aliasObj = Object.fromEntries(
     Object.entries(obj).map(([key]) => ([prefix + key, null]))
   );
   aliasFields(this, aliasObj);
+
   return this;
 };

+/**
+ * @param {string} path
+ * @param {object} fieldConfig
+ *
+ * @api private
+ */
+Schema.prototype._addEncryptedField = function _addEncryptedField(path, fieldConfig) {
+  const type = this.path(path).autoEncryptionType();
+  if (type == null) {
+    throw new Error(`Invalid BSON type for FLE field: '${path}'`);
+  }
+
+  this.encryptedFields[path] = clone(fieldConfig);
+};
+
+/**
+ * @api private
+ */
+Schema.prototype._removeEncryptedField = function _removeEncryptedField(path) {
+  delete this.encryptedFields[path];
+};
+
+/**
+ * @api private
+ */
+Schema.prototype._hasEncryptedFields = function _hasEncryptedFields() {
+  return Object.keys(this.encryptedFields).length > 0;
+};
+
+Schema.prototype._buildEncryptedFields = function() {
+  const fields = Object.entries(this.encryptedFields).map(
+    ([path, config]) => {
+      const bsonType = inferBSONType(this, path);
+      // { path, bsonType, keyId, queries? }
+      return { path, bsonType, ...config };
+    });
+
+  return { fields };
+};
+
+Schema.prototype._buildSchemaMap = function() {
+  /**
+   * `schemaMap`s are JSON schemas, which use the following structure to represent objects:
+   * { field: { bsonType: 'object', properties: { ... } } }
+   *
+   * for example, a schema that looks like this `{ a: { b: int32 } }` would be encoded as
+   * `{ a: { bsonType: 'object', properties: { b: < encryption configuration > } } }`
+   *
+   * This function takes an array of path segments, an output object (that gets mutated) and
+   * a value to associated with the full path, and constructs a valid CSFLE JSON schema path for
+   * the object. This works for deeply nested properties as well.
+   *
+   * @param {string[]} path array of path components
+   * @param {object} object the object in which to build a JSON schema of `path`'s properties
+   * @param {object} value the value to associate with the path in object
+   */
+  function buildNestedPath(path, object, value) {
+    let i = 0, component = path[i];
+    for (; i < path.length - 1; ++i, component = path[i]) {
+      object[component] = object[component] == null ? {
+        bsonType: 'object',
+        properties: {}
+      } : object[component];
+      object = object[component].properties;
+    }
+    object[component] = value;
+  }
+
+  const schemaMapPropertyReducer = (accum, [path, propertyConfig]) => {
+    const bsonType = inferBSONType(this, path);
+    const pathComponents = path.split('.');
+    const configuration = { encrypt: { ...propertyConfig, bsonType } };
+    buildNestedPath(pathComponents, accum, configuration);
+    return accum;
+  };
+
+  const properties = Object.entries(this.encryptedFields).reduce(
+    schemaMapPropertyReducer,
+    {});
+
+  return {
+    bsonType: 'object',
+    properties
+  };
+};
+
 /**
  * Add an alias for `path`. This means getting or setting the `alias`
  * is equivalent to getting or setting the `path`.
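To make the two builders above concrete, here is a sketch of their output for a hypothetical nested encrypted path (`keyId` elided):

    // new Schema({ a: { b: { type: String, encrypt: { keyId } } } }, { encryptionType: 'csfle' })
    //
    // _buildSchemaMap() (CSFLE) returns roughly:
    //   { bsonType: 'object',
    //     properties: { a: { bsonType: 'object', properties: { b: { encrypt: { keyId, bsonType: 'string' } } } } } }
    //
    // _buildEncryptedFields() (queryable encryption) returns roughly:
    //   { fields: [{ path: 'a.b', bsonType: 'string', keyId }] }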
@@ -1008,23 +1154,23 @@ Schema.prototype.reserved = Schema.reserved;
 const reserved = Schema.reserved;
 // Core object
 reserved['prototype'] =
-// EventEmitter
-reserved.emit =
-reserved.listeners =
-reserved.removeListener =
-
-// document properties and functions
-reserved.collection =
-reserved.errors =
-reserved.get =
-reserved.init =
-reserved.isModified =
-reserved.isNew =
-reserved.populated =
-reserved.remove =
-reserved.save =
-reserved.toObject =
-reserved.validate = 1;
+  // EventEmitter
+  reserved.emit =
+  reserved.listeners =
+  reserved.removeListener =
+
+  // document properties and functions
+  reserved.collection =
+  reserved.errors =
+  reserved.get =
+  reserved.init =
+  reserved.isModified =
+  reserved.isNew =
+  reserved.populated =
+  reserved.remove =
+  reserved.save =
+  reserved.toObject =
+  reserved.validate = 1;
 reserved.collection = 1;

 /**
@@ -1104,10 +1250,10 @@ Schema.prototype.path = function(path, obj) {
       }
       if (typeof branch[sub] !== 'object') {
         const msg = 'Cannot set nested path `' + path + '`. '
-            + 'Parent path `'
-            + fullPath
-            + '` already set to type ' + branch[sub].name
-            + '.';
+          + 'Parent path `'
+          + fullPath
+          + '` already set to type ' + branch[sub].name
+          + '.';
         throw new Error(msg);
       }
       branch = branch[sub];
@@ -1118,6 +1264,9 @@ Schema.prototype.path = function(path, obj) {
   this.paths[path] = this.interpretAsType(path, obj, this.options);
   const schemaType = this.paths[path];

+  // If overwriting an existing path, make sure to clear the childSchemas
+  this.childSchemas = this.childSchemas.filter(childSchema => childSchema.path !== path);
+
   if (schemaType.$isSchemaMap) {
     // Maps can have arbitrary keys, so `$*` is internal shorthand for "any key"
     // The '$' is to imply this path should never be stored in MongoDB so we
@@ -1375,6 +1524,16 @@ Schema.prototype.interpretAsType = function(path, obj, options) {
   let type = obj[options.typeKey] && (obj[options.typeKey] instanceof Function || options.typeKey !== 'type' || !obj.type.type)
     ? obj[options.typeKey]
     : {};
+
+  if (type instanceof SchemaType) {
+    if (type.path === path) {
+      return type;
+    }
+    const clone = type.clone();
+    clone.path = path;
+    return clone;
+  }
+
   let name;

   if (utils.isPOJO(type) || type === 'mixed') {
@@ -1404,8 +1563,8 @@ Schema.prototype.interpretAsType = function(path, obj, options) {
       return new MongooseTypes.DocumentArray(path, cast, obj);
     }
     if (cast &&
-        cast[options.typeKey] &&
-        cast[options.typeKey].instanceOfSchema) {
+      cast[options.typeKey] &&
+      cast[options.typeKey].instanceOfSchema) {
       if (!(cast[options.typeKey] instanceof Schema)) {
         if (this.options._isMerging) {
           cast[options.typeKey] = new Schema(cast[options.typeKey]);
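Two behavioral notes on the `path()`/`interpretAsType()` hunks above, sketched under the assumption that both code paths run through `Schema.prototype.path()`: overwriting a path now drops the stale `childSchemas` entry (the perf issue tracked as gh-15253), and an existing SchemaType instance is reused, cloned only when its path differs:

    const schema = new Schema({ sub: new Schema({ name: String }) });
    schema.path('sub', new Schema({ title: String })); // old 'sub' entry removed from childSchemas

    const titleType = schema.path('sub.title');
    const other = new Schema({ renamed: { type: titleType } }); // clone of titleType with path 'renamed'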
@@ -1739,7 +1898,7 @@ Schema.prototype.hasMixedParent = function(path) {
   for (let i = 0; i < subpaths.length; ++i) {
     path = i > 0 ? path + '.' + subpaths[i] : subpaths[i];
     if (this.paths.hasOwnProperty(path) &&
-        this.paths[path] instanceof MongooseTypes.Mixed) {
+      this.paths[path] instanceof MongooseTypes.Mixed) {
       return this.paths[path];
     }
   }
@@ -2520,6 +2679,8 @@ Schema.prototype.remove = function(path) {
         delete this.paths[name];
         _deletePath(this, name);
+
+        this._removeEncryptedField(name);
       }, this);
   }
   return this;
@@ -2615,9 +2776,9 @@ Schema.prototype.removeVirtual = function(path) {
 Schema.prototype.loadClass = function(model, virtualsOnly) {
   // Stop copying when hit certain base classes
   if (model === Object.prototype ||
-      model === Function.prototype ||
-      model.prototype.hasOwnProperty('$isMongooseModelPrototype') ||
-      model.prototype.hasOwnProperty('$isMongooseDocumentPrototype')) {
+    model === Function.prototype ||
+    model.prototype.hasOwnProperty('$isMongooseModelPrototype') ||
+    model.prototype.hasOwnProperty('$isMongooseDocumentPrototype')) {
     return this;
   }

diff --git a/lib/schema/array.js b/lib/schema/array.js
index 06b1e988cb8..9e689ec5201 100644
--- a/lib/schema/array.js
+++ b/lib/schema/array.js
@@ -718,6 +718,10 @@ SchemaArray.prototype.toJSONSchema = function toJSONSchema(options) {
   };
 };

+SchemaArray.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'array';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/bigint.js b/lib/schema/bigint.js
index 474d77461fd..be937eafbf5 100644
--- a/lib/schema/bigint.js
+++ b/lib/schema/bigint.js
@@ -254,6 +254,10 @@ SchemaBigInt.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'long', options?.useBsonType, isRequired);
 };

+SchemaBigInt.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'int64';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/boolean.js b/lib/schema/boolean.js
index b11162621f0..ed478b95bf8 100644
--- a/lib/schema/boolean.js
+++ b/lib/schema/boolean.js
@@ -304,6 +304,10 @@ SchemaBoolean.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('boolean', 'bool', options?.useBsonType, isRequired);
 };

+SchemaBoolean.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'boolean';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/buffer.js b/lib/schema/buffer.js
index 8111956fb95..f9d3027367d 100644
--- a/lib/schema/buffer.js
+++ b/lib/schema/buffer.js
@@ -314,6 +314,10 @@ SchemaBuffer.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'binData', options?.useBsonType, isRequired);
 };

+SchemaBuffer.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'binary';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/date.js b/lib/schema/date.js
index 6d671f51e50..8aa20811716 100644
--- a/lib/schema/date.js
+++ b/lib/schema/date.js
@@ -440,6 +440,10 @@ SchemaDate.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'date', options?.useBsonType, isRequired);
 };

+SchemaDate.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'date';
+};
+
 /*!
  * Module exports.
  */
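Each of these `autoEncryptionType()` additions (continued in the next few files) maps a SchemaType to the BSON type name used to validate `encrypt` declarations; the base implementation in lib/schemaType.js returns `null`, which `_addEncryptedField()` treats as not encryptable. A sketch:

    new Schema({ d: Date }).path('d').autoEncryptionType(); // 'date'
    new Schema({ n: BigInt }).path('n').autoEncryptionType(); // 'int64'
    new Schema({ m: Schema.Types.Mixed }).path('m').autoEncryptionType(); // null => rejected for FLE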
diff --git a/lib/schema/decimal128.js b/lib/schema/decimal128.js
index 3c7f3e28ca3..b3d80d54a6c 100644
--- a/lib/schema/decimal128.js
+++ b/lib/schema/decimal128.js
@@ -235,6 +235,10 @@ SchemaDecimal128.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'decimal', options?.useBsonType, isRequired);
 };

+SchemaDecimal128.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'decimal128';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/double.js b/lib/schema/double.js
index 23b1f33b38d..fbbf484aba2 100644
--- a/lib/schema/double.js
+++ b/lib/schema/double.js
@@ -218,6 +218,10 @@ SchemaDouble.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('number', 'double', options?.useBsonType, isRequired);
 };

+SchemaDouble.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'double';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/int32.js b/lib/schema/int32.js
index 7cf2c364dc5..65bfb66e174 100644
--- a/lib/schema/int32.js
+++ b/lib/schema/int32.js
@@ -260,6 +260,10 @@ SchemaInt32.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('number', 'int', options?.useBsonType, isRequired);
 };

+SchemaInt32.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'int32';
+};
+
 /*!
  * Module exports.
diff --git a/lib/schema/map.js b/lib/schema/map.js
index c65f21b931b..c6de8da702b 100644
--- a/lib/schema/map.js
+++ b/lib/schema/map.js
@@ -91,14 +91,7 @@ class SchemaMap extends SchemaType {
     const isRequired = this.options.required && typeof this.options.required !== 'function';
     const result = createJSONSchemaTypeDefinition('object', 'object', useBsonType, isRequired);
-
-    if (embeddedSchemaType.schema) {
-      result.additionalProperties = useBsonType
-        ? { ...embeddedSchemaType.toJSONSchema(options) }
-        : { ...embeddedSchemaType.toJSONSchema(options) };
-    } else {
-      result.additionalProperties = embeddedSchemaType.toJSONSchema(options);
-    }
+    result.additionalProperties = embeddedSchemaType.toJSONSchema(options);

     return result;
   }
diff --git a/lib/schema/objectId.js b/lib/schema/objectId.js
index 6eb0fbed08f..fd379e014d1 100644
--- a/lib/schema/objectId.js
+++ b/lib/schema/objectId.js
@@ -304,6 +304,10 @@ SchemaObjectId.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'objectId', options?.useBsonType, isRequired);
 };

+SchemaObjectId.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'objectid';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/string.js b/lib/schema/string.js
index 1e84cac6271..b2c05f374a7 100644
--- a/lib/schema/string.js
+++ b/lib/schema/string.js
@@ -712,6 +712,10 @@ SchemaString.prototype.toJSONSchema = function toJSONSchema(options) {
   return createJSONSchemaTypeDefinition('string', 'string', options?.useBsonType, isRequired);
 };

+SchemaString.prototype.autoEncryptionType = function autoEncryptionType() {
+  return 'string';
+};
+
 /*!
  * Module exports.
  */
diff --git a/lib/schema/uuid.js b/lib/schema/uuid.js
index bb264159487..94fb6cbe682 100644
--- a/lib/schema/uuid.js
+++ b/lib/schema/uuid.js
@@ -7,44 +7,14 @@
 const MongooseBuffer = require('../types/buffer');
 const SchemaType = require('../schemaType');
 const CastError = SchemaType.CastError;
+const castUUID = require('../cast/uuid');
 const createJSONSchemaTypeDefinition = require('../helpers/createJSONSchemaTypeDefinition');
 const utils = require('../utils');
 const handleBitwiseOperator = require('./operators/bitwise');

-const UUID_FORMAT = /[0-9a-f]{8}-[0-9a-f]{4}-[0-9][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i;
+const UUID_FORMAT = castUUID.UUID_FORMAT;
 const Binary = MongooseBuffer.Binary;

-/**
- * Helper function to convert the input hex-string to a buffer
- * @param {String} hex The hex string to convert
- * @returns {Buffer} The hex as buffer
- * @api private
- */
-
-function hex2buffer(hex) {
-  // use buffer built-in function to convert from hex-string to buffer
-  const buff = hex != null && Buffer.from(hex, 'hex');
-  return buff;
-}
-
-/**
- * Convert a String to Binary
- * @param {String} uuidStr The value to process
- * @returns {MongooseBuffer} The binary to store
- * @api private
- */
-
-function stringToBinary(uuidStr) {
-  // Protect against undefined & throwing err
-  if (typeof uuidStr !== 'string') uuidStr = '';
-  const hex = uuidStr.replace(/[{}-]/g, ''); // remove extra characters
-  const bytes = hex2buffer(hex);
-  const buff = new MongooseBuffer(bytes);
-  buff._subtype = 4;
-
-  return buff;
-}
-
 /**
  * Convert binary to a uuid string
  * @param {Buffer|Binary|String} uuidBin The value to process
@@ -110,44 +80,7 @@ SchemaUUID.prototype.constructor = SchemaUUID;
  * ignore
  */

-SchemaUUID._cast = function(value) {
-  if (value == null) {
-    return value;
-  }
-
-  function newBuffer(initbuff) {
-    const buff = new MongooseBuffer(initbuff);
-    buff._subtype = 4;
-    return buff;
-  }
-
-  if (typeof value === 'string') {
-    if (UUID_FORMAT.test(value)) {
-      return stringToBinary(value);
-    } else {
-      throw new CastError(SchemaUUID.schemaName, value, this.path);
-    }
-  }
-
-  if (Buffer.isBuffer(value)) {
-    return newBuffer(value);
-  }
-
-  if (value instanceof Binary) {
-    return newBuffer(value.value(true));
-  }
-
-  // Re: gh-647 and gh-3030, we're ok with casting using `toString()`
-  // **unless** its the default Object.toString, because "[object Object]"
-  // doesn't really qualify as useful data
-  if (value.toString && value.toString !== Object.prototype.toString) {
-    if (UUID_FORMAT.test(value.toString())) {
-      return stringToBinary(value.toString());
-    }
-  }
-
-  throw new CastError(SchemaUUID.schemaName, value, this.path);
-};
+SchemaUUID._cast = castUUID;

 /**
  * Attaches a getter for all UUID instances.
diff --git a/lib/schemaType.js b/lib/schemaType.js
index 22c9edbd473..5b2951e222a 100644
--- a/lib/schemaType.js
+++ b/lib/schemaType.js
@@ -1783,6 +1783,14 @@ SchemaType.prototype.toJSONSchema = function toJSONSchema() {
   throw new Error('Converting unsupported SchemaType to JSON Schema: ' + this.instance);
 };

+/**
+ * Returns the BSON type that the schema corresponds to, for automatic encryption.
+ * @api private
+ */
+SchemaType.prototype.autoEncryptionType = function autoEncryptionType() {
+  return null;
+};
+
 /*!
  * Module exports.
  */
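The removed helpers above now live in `lib/cast/uuid`, which exports the cast function with `UUID_FORMAT` attached, so UUID casting behaves the same from both the schema type and the query casting paths. A sketch of the unchanged observable behavior (model name hypothetical):

    const Model = mongoose.model('UuidExample', new Schema({ id: Schema.Types.UUID }));
    const doc = new Model({ id: '09190f70-3d30-11e5-8814-0f4df9a59c41' });
    // stored as BSON Binary subtype 4; strings that don't match UUID_FORMAT still throw a CastError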
diff --git a/lib/types/double.js b/lib/types/double.js
new file mode 100644
index 00000000000..6117173570b
--- /dev/null
+++ b/lib/types/double.js
@@ -0,0 +1,13 @@
+/**
+ * Double type constructor
+ *
+ * #### Example:
+ *
+ *     const pi = new mongoose.Types.Double(3.1415);
+ *
+ * @constructor Double
+ */
+
+'use strict';
+
+module.exports = require('bson').Double;
diff --git a/lib/types/index.js b/lib/types/index.js
index d234f6bb62a..8252aabfb21 100644
--- a/lib/types/index.js
+++ b/lib/types/index.js
@@ -12,6 +12,7 @@ exports.Document = // @deprecate
 exports.Embedded = require('./arraySubdocument');

 exports.DocumentArray = require('./documentArray');
+exports.Double = require('./double');
 exports.Decimal128 = require('./decimal128');
 exports.ObjectId = require('./objectid');
diff --git a/lib/utils.js b/lib/utils.js
index 6fc5c335ef0..c531839a20b 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -89,19 +89,19 @@ exports.deepEqual = function deepEqual(a, b) {
   }

   if ((isBsonType(a, 'ObjectId') && isBsonType(b, 'ObjectId')) ||
-      (isBsonType(a, 'Decimal128') && isBsonType(b, 'Decimal128'))) {
+    (isBsonType(a, 'Decimal128') && isBsonType(b, 'Decimal128'))) {
     return a.toString() === b.toString();
   }

   if (a instanceof RegExp && b instanceof RegExp) {
     return a.source === b.source &&
-        a.ignoreCase === b.ignoreCase &&
-        a.multiline === b.multiline &&
-        a.global === b.global &&
-        a.dotAll === b.dotAll &&
-        a.unicode === b.unicode &&
-        a.sticky === b.sticky &&
-        a.hasIndices === b.hasIndices;
+      a.ignoreCase === b.ignoreCase &&
+      a.multiline === b.multiline &&
+      a.global === b.global &&
+      a.dotAll === b.dotAll &&
+      a.unicode === b.unicode &&
+      a.sticky === b.sticky &&
+      a.hasIndices === b.hasIndices;
   }

   if (a == null || b == null) {
@@ -287,8 +287,8 @@ exports.merge = function merge(to, from, options, path) {
       // base schema has a given path as a single nested but discriminator schema
       // has the path as a document array, or vice versa (gh-9534)
       if (options.isDiscriminatorSchemaMerge &&
-          (from[key].$isSingleNested && to[key].$isMongooseDocumentArray) ||
-          (from[key].$isMongooseDocumentArray && to[key].$isSingleNested)) {
+        (from[key].$isSingleNested && to[key].$isMongooseDocumentArray) ||
+        (from[key].$isMongooseDocumentArray && to[key].$isSingleNested)) {
         continue;
       } else if (from[key].instanceOfSchema) {
         if (to[key].instanceOfSchema) {
@@ -551,8 +551,8 @@ exports.populate = function populate(path, select, model, match, options, subPop
     };
   }

-  if (typeof obj.path !== 'string') {
-    throw new TypeError('utils.populate: invalid path. Expected string. Got typeof `' + typeof path + '`');
+  if (typeof obj.path !== 'string' && !(Array.isArray(obj.path) && obj.path.every(el => typeof el === 'string'))) {
+    throw new TypeError('utils.populate: invalid path. Expected string or array of strings. Got typeof `' + typeof path + '`');
   }

   return _populateObj(obj);
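With the relaxed check above, `utils.populate()` accepts an array of path strings in addition to the space-delimited form; `_populateObj()` (next hunk) normalizes both into the same array. For example:

    await Doc.populate(docs, { path: ['refA', 'refB'] }); // equivalent to { path: 'refA refB' }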
@@ -600,7 +604,11 @@ function _populateObj(obj) {
   }

   const ret = [];
-  const paths = oneSpaceRE.test(obj.path) ? obj.path.split(manySpaceRE) : [obj.path];
+  const paths = oneSpaceRE.test(obj.path)
+    ? obj.path.split(manySpaceRE)
+    : Array.isArray(obj.path)
+      ? obj.path
+      : [obj.path];
   if (obj.options != null) {
     obj.options = clone(obj.options);
   }
@@ -995,7 +999,7 @@ exports.getOption = function(name) {
  * ignore
  */

-exports.noop = function() {};
+exports.noop = function() { };

 exports.errorToPOJO = function errorToPOJO(error) {
   const isError = error instanceof Error;
@@ -1025,3 +1029,13 @@ exports.injectTimestampsOption = function injectTimestampsOption(writeOperation,
   }
   writeOperation.timestamps = timestampsOption;
 };
+
+exports.print = function(...args) {
+  const { inspect } = require('util');
+  console.error(
+    inspect(
+      ...args,
+      { depth: Infinity }
+    )
+  );
+};
diff --git a/package.json b/package.json
index ecade1a0589..59e1e3d4cd3 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "name": "mongoose",
   "description": "Mongoose MongoDB ODM",
-  "version": "8.9.5",
+  "version": "8.12.1",
   "author": "Guillermo Rauch <guillermo@learnboost.com>",
   "keywords": [
     "mongodb",
@@ -20,17 +20,17 @@
   "type": "commonjs",
   "license": "MIT",
   "dependencies": {
-    "bson": "^6.10.1",
+    "bson": "^6.10.3",
     "kareem": "2.6.3",
-    "mongodb": "~6.12.0",
+    "mongodb": "~6.14.0",
     "mpath": "0.9.0",
     "mquery": "5.0.0",
     "ms": "2.1.3",
     "sift": "17.1.3"
   },
   "devDependencies": {
-    "@babel/core": "7.26.0",
-    "@babel/preset-env": "7.26.0",
+    "@babel/core": "7.26.9",
+    "@babel/preset-env": "7.26.9",
     "@typescript-eslint/eslint-plugin": "^8.19.1",
     "@typescript-eslint/parser": "^8.19.1",
     "acquit": "1.3.0",
@@ -48,16 +48,16 @@
     "eslint-plugin-markdown": "^5.1.0",
     "eslint-plugin-mocha-no-only": "1.2.0",
     "express": "^4.19.2",
-    "fs-extra": "~11.2.0",
+    "fs-extra": "~11.3.0",
     "highlight.js": "11.11.1",
     "lodash.isequal": "4.5.0",
     "lodash.isequalwith": "4.4.0",
     "markdownlint-cli2": "^0.17.1",
-    "marked": "15.0.4",
+    "marked": "15.0.7",
     "mkdirp": "^3.0.1",
-    "mocha": "11.0.1",
+    "mocha": "11.1.0",
     "moment": "2.30.1",
-    "mongodb-memory-server": "10.1.3",
+    "mongodb-memory-server": "10.1.4",
     "ncp": "^2.0.0",
     "nyc": "15.1.0",
     "pug": "3.0.3",
@@ -65,9 +65,9 @@
     "sinon": "19.0.2",
     "stream-browserify": "3.0.0",
     "tsd": "0.31.2",
-    "typescript": "5.7.2",
-    "uuid": "11.0.3",
-    "webpack": "5.97.1"
+    "typescript": "5.7.3",
+    "uuid": "11.1.0",
+    "webpack": "5.98.0"
   },
   "directories": {
     "lib": "./lib/mongoose"
@@ -146,4 +146,4 @@
     "target": "ES2017"
   }
 }
-}
+}
\ No newline at end of file
diff --git a/scripts/configure-cluster-with-encryption.sh b/scripts/configure-cluster-with-encryption.sh
index 8f366bc4bbc..9d1ddcf236b 100644
--- a/scripts/configure-cluster-with-encryption.sh
+++ b/scripts/configure-cluster-with-encryption.sh
@@ -4,10 +4,11 @@

 # this script downloads all tools required to use FLE with mongodb, then starts a cluster of the provided configuration (sharded on 8.0 server)

-export CWD=$(pwd);
+export CWD=$(pwd)
+export DRIVERS_TOOLS_PINNED_COMMIT=35d0592c76f4f3d25a5607895eb21b491dd52543

-# install extra dependency
-npm install mongodb-client-encryption
+# install extra dependency
+npm install --no-save mongodb-client-encryption

 # set up mongodb cluster and encryption configuration if the data/ folder does not exist
 if [ ! -d "data" ]; then

   mkdir data
   cd data

-  # note:
-  # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing.
-  # if you'd like to make changes to the cluster settings, edit the exported variables below.
-  # for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh
-  # after this script is run, the data/ folder will notably contain the following:
-  # 'mo-expansion.yml' file which contains for your cluster URI and crypt shared library path
-  # 'drivers-evergreen-tools/mongodb/bin' which contain executables for other mongodb libraries such as mongocryptd, mongosh, and mongod
+  # note:
+  # we're using drivers-evergreen-tools which is a repo used by MongoDB drivers to start clusters for testing.
+  # if you'd like to make changes to the cluster settings, edit the exported variables below.
+  # for configuration options for the exported variables, see here: https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-orchestration.sh
+  # after this script is run, the data/ folder will notably contain the following:
+  # 'mo-expansion.yml' file which contains for your cluster URI and crypt shared library path
+  # 'drivers-evergreen-tools/mongodb/bin' which contain executables for other mongodb libraries such as mongocryptd, mongosh, and mongod
   if [ ! -d "drivers-evergreen-tools/" ]; then
-    git clone --depth=1 "https://github.com/mongodb-labs/drivers-evergreen-tools.git"
+    git clone "https://github.com/mongodb-labs/drivers-evergreen-tools.git"
+    # pin stable commit
+    cd drivers-evergreen-tools
+    git checkout $DRIVERS_TOOLS_PINNED_COMMIT
+    cd ..
   fi

   # configure cluster settings
@@ -41,14 +46,14 @@ if [ ! -d "data" ]; then
   mkdir mo
   cd -

-  rm expansions.sh 2> /dev/null
+  rm expansions.sh 2>/dev/null

   echo 'Configuring Cluster...'

   # start cluster
-  (bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh) 1> /dev/null 2> /dev/null
-
+  (bash $DRIVERS_TOOLS/.evergreen/run-orchestration.sh) 1>/dev/null 2>/dev/null
+
   echo 'Cluster Configuration Finished!'
   cd ..
-fi
\ No newline at end of file
+fi
diff --git a/scripts/run-encryption-tests.sh b/scripts/run-encryption-tests.sh
new file mode 100755
index 00000000000..0209292168d
--- /dev/null
+++ b/scripts/run-encryption-tests.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+# sets up mongodb cluster and encryption configuration, adds relevant variables to the environment, and runs encryption tests
+
+export CWD=$(pwd);
+
+# set up mongodb cluster and encryption configuration if the data/ folder does not exist
+# note: for tooling, cluster set-up and configuration look into the 'scripts/configure-cluster-with-encryption.sh' script
+
+if [ -d "data" ]; then
+  cd data
+else
+  source $CWD/scripts/configure-cluster-with-encryption.sh
+fi
+
+# extracts MONGOOSE_TEST_URI and CRYPT_SHARED_LIB_PATH from .yml file into environment variables for this test run
+read -r -d '' SOURCE_SCRIPT << EOM
+const fs = require('fs');
+const file = fs.readFileSync('mo-expansion.yml', { encoding: 'utf-8' })
+  .trim().split('\\n');
+const regex = /^(?<key>.*): "(?<value>.*)"$/;
+const variables = file.map(
+  (line) => regex.exec(line.trim()).groups
+).map(
+  ({key, value}) => \`export \${key}='\${value}'\`
+).join('\n');
+
+process.stdout.write(variables);
+process.stdout.write('\n');
+EOM
+
+node --eval "$SOURCE_SCRIPT" | tee expansions.sh
+source expansions.sh
+
+export MONGOOSE_TEST_URI=$MONGODB_URI
+
+# run encryption tests
+cd ..
+npx mocha --exit ./test/encryption/*.test.js
diff --git a/test/aggregate.test.js b/test/aggregate.test.js
index a746e143e8d..fd223ed6c98 100644
--- a/test/aggregate.test.js
+++ b/test/aggregate.test.js
@@ -287,11 +287,14 @@ describe('aggregate: ', function() {
     it('works', function() {
       const aggregate = new Aggregate();

-      assert.equal(aggregate.near({ a: 1 }), aggregate);
-      assert.deepEqual(aggregate._pipeline, [{ $geoNear: { a: 1 } }]);
+      assert.equal(aggregate.near({ near: { type: 'Point', coordinates: [1, 2] } }), aggregate);
+      assert.deepEqual(aggregate._pipeline, [{ $geoNear: { near: { type: 'Point', coordinates: [1, 2] } } }]);

-      aggregate.near({ b: 2 });
-      assert.deepEqual(aggregate._pipeline, [{ $geoNear: { a: 1 } }, { $geoNear: { b: 2 } }]);
+      aggregate.near({ near: { type: 'Point', coordinates: [3, 4] } });
+      assert.deepEqual(aggregate._pipeline, [
+        { $geoNear: { near: { type: 'Point', coordinates: [1, 2] } } },
+        { $geoNear: { near: { type: 'Point', coordinates: [3, 4] } } }
+      ]);
     });

     it('works with discriminators (gh-3304)', function() {
@@ -308,19 +311,19 @@ describe('aggregate: ', function() {

       aggregate._model = stub;

-      assert.equal(aggregate.near({ a: 1 }), aggregate);
+      assert.equal(aggregate.near({ near: { type: 'Point', coordinates: [1, 2] } }), aggregate);
       // Run exec so we apply discriminator pipeline
       Aggregate._prepareDiscriminatorPipeline(aggregate._pipeline, stub.schema);
       assert.deepEqual(aggregate._pipeline,
-        [{ $geoNear: { a: 1, query: { __t: 'subschema' } } }]);
+        [{ $geoNear: { near: { type: 'Point', coordinates: [1, 2] }, query: { __t: 'subschema' } } }]);

       aggregate = new Aggregate();
       aggregate._model = stub;

-      aggregate.near({ b: 2, query: { x: 1 } });
+      aggregate.near({ near: { type: 'Point', coordinates: [3, 4] }, query: { x: 1 } });
       Aggregate._prepareDiscriminatorPipeline(aggregate._pipeline, stub.schema);
       assert.deepEqual(aggregate._pipeline,
-        [{ $geoNear: { b: 2, query: { x: 1, __t: 'subschema' } } }]);
+        [{ $geoNear: { near: { type: 'Point', coordinates: [3, 4] }, query: { x: 1, __t: 'subschema' } } }]);
     });
   });

@@ -1284,4 +1287,47 @@ describe('aggregate: ', function() {
     await p;
     await m.disconnect();
   });
+
+  it('throws error if calling near() with empty coordinates (gh-15188)', async function() {
+    const M = db.model('Test', new Schema({ loc: { type: [Number], index: '2d' } }));
+    assert.throws(() => {
+      const aggregate = new Aggregate([], M);
+      aggregate.near({
+        near: {
+          type: 'Point',
+          coordinates: []
+        }
+      });
+    }, /Aggregate `near\(\)` argument has invalid coordinates, got ""/);
+  });
+
+  it('cursor() errors out if schema pre aggregate hook throws an error (gh-15279)', async function() {
+    const schema = new Schema({ name: String });
+
+    schema.pre('aggregate', function(next) {
+      if (!this.options.allowed) {
+        throw new Error('Unauthorized aggregate operation: only allowed operations are permitted');
+      }
+      next();
+    });
+
+    const Test = db.model('Test', schema);
+
+    await Test.create({ name: 'test1' });
+
+    await assert.rejects(
+      async() => {
+        await Test.aggregate([{ $limit: 1 }], { allowed: false }).exec();
+      },
+      err => err.message === 'Unauthorized aggregate operation: only allowed operations are permitted'
+    );
+
+    const cursor = Test.aggregate([{ $limit: 1 }], { allowed: false }).cursor();
+    await assert.rejects(
+      async() => {
+        await cursor.next();
+      },
+      err => err.message === 'Unauthorized aggregate operation: only allowed operations are permitted'
+    );
+  });
 });
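The rewritten assertions reflect that `near()` arguments are now validated as proper `$geoNear` stages; a sketch of the accepted shape (model name hypothetical; MongoDB itself additionally requires `distanceField` on `$geoNear`):

    Place.aggregate().near({
      near: { type: 'Point', coordinates: [1, 2] },
      distanceField: 'dist'
    });
    // near({ near: { type: 'Point', coordinates: [] } }) now throws before hitting the server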
diff --git a/test/bigint.test.js b/test/bigint.test.js
index e3d00418e2c..c476039b0ef 100644
--- a/test/bigint.test.js
+++ b/test/bigint.test.js
@@ -106,22 +106,22 @@ describe('BigInt', function() {
     });

     it('is stored as a long in MongoDB', async function() {
-      await Test.create({ myBigInt: 42n });
+      await Test.create({ myBigInt: 9223372036854775807n });

       const doc = await Test.findOne({ myBigInt: { $type: 'long' } });
       assert.ok(doc);
-      assert.strictEqual(doc.myBigInt, 42n);
+      assert.strictEqual(doc.myBigInt, 9223372036854775807n);
     });

     it('becomes a bigint with lean using useBigInt64', async function() {
-      await Test.create({ myBigInt: 7n });
+      await Test.create({ myBigInt: 9223372036854775807n });

       const doc = await Test.
-        findOne({ myBigInt: 7n }).
+        findOne({ myBigInt: 9223372036854775807n }).
         setOptions({ useBigInt64: true }).
         lean();
       assert.ok(doc);
-      assert.strictEqual(doc.myBigInt, 7n);
+      assert.strictEqual(doc.myBigInt, 9223372036854775807n);
     });

     it('can query with comparison operators', async function() {
diff --git a/test/collection.test.js b/test/collection.test.js
index 755eccbe55a..7e85135a7f5 100644
--- a/test/collection.test.js
+++ b/test/collection.test.js
@@ -64,6 +64,16 @@ describe('collections:', function() {
     });
   });

+  it('handles bufferTimeoutMS in schemaUserProvidedOptions', async function() {
+    db = mongoose.createConnection();
+    const collection = db.collection('gh14184');
+    collection.opts.schemaUserProvidedOptions = { bufferTimeoutMS: 100 };
+
+    const err = await collection.find({ foo: 'bar' }, {}).then(() => null, err => err);
+    assert.ok(err);
+    assert.ok(err.message.includes('buffering timed out after 100ms'));
+  });
+
   it('methods should that throw (unimplemented)', function() {
     const collection = new Collection('test', mongoose.connection);
     let thrown = false;
diff --git a/test/connection.test.js b/test/connection.test.js
index d73508565fe..effc91d23cb 100644
--- a/test/connection.test.js
+++ b/test/connection.test.js
@@ -1637,6 +1637,19 @@ describe('connections:', function() {
     assert.ok(!res.map(c => c.name).includes('gh12940_Conn'));
   });

+  it('does not wait for buffering if autoCreate: false (gh-15241)', async function() {
+    const m = new mongoose.Mongoose();
+    m.set('bufferTimeoutMS', 100);
+
+    const schema = new Schema({ name: String }, {
+      autoCreate: false
+    });
+    const Model = m.model('gh15241_Conn', schema);
+
+    // Without gh-15241 changes, this would buffer and fail even though `autoCreate: false`
+    await Model.init();
+  });
+
   it('should not create default connection with createInitialConnection = false (gh-12965)', function() {
     const m = new mongoose.Mongoose({
       createInitialConnection: false
@@ -1787,6 +1800,27 @@ describe('connections:', function() {
     assert.ok(res.mongoose.results[1].message.includes('not a number'));
   });

+  it('buffers connection helpers', async function() {
+    const m = new mongoose.Mongoose();
+
+    const promise = m.connection.listCollections();
+
+    await new Promise(resolve => setTimeout(resolve, 100));
+    await m.connect(start.uri, { bufferTimeoutMS: 1000 });
+    await promise;
+
+    await m.connection.listCollections();
+
+    await m.disconnect();
+  });
+
+  it('connection helpers buffering times out', async function() {
+    const m = new mongoose.Mongoose();
+    m.set('bufferTimeoutMS', 100);
+
+    await assert.rejects(m.connection.listCollections(), /Connection operation buffering timed out after 100ms/);
+  });
+
   it('supports db-level aggregate on connection (gh-15118)', async function() {
     const db = start();
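These tests moved to the int64 boundary value because BigInts are stored as BSON longs and, per the 8.11.0 changelog entry for gh-15230, values outside what MongoDB can safely store are now rejected at cast time. A sketch:

    await Test.create({ myBigInt: 9223372036854775807n }); // 2^63 - 1: max signed 64-bit value, stores fine
    await Test.create({ myBigInt: 9223372036854775808n }); // 2^63: expected to throw a cast error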
diff --git a/test/document.populate.test.js b/test/document.populate.test.js
index bbfbc1df99a..2cfa7db636e 100644
--- a/test/document.populate.test.js
+++ b/test/document.populate.test.js
@@ -1075,4 +1075,81 @@ describe('document.populate', function() {
     assert.deepStrictEqual(codeUser.extras[0].config.paymentConfiguration.paymentMethods[0]._id, code._id);
     assert.strictEqual(codeUser.extras[0].config.paymentConfiguration.paymentMethods[0].code, 'test code');
   });
+
+  it('supports populate with ordered option (gh-15231)', async function() {
+    const docSchema = new Schema({
+      refA: { type: Schema.Types.ObjectId, ref: 'Test1' },
+      refB: { type: Schema.Types.ObjectId, ref: 'Test2' },
+      refC: { type: Schema.Types.ObjectId, ref: 'Test3' }
+    });
+
+    const doc1Schema = new Schema({ name: String });
+    const doc2Schema = new Schema({ title: String });
+    const doc3Schema = new Schema({ content: String });
+
+    const Doc = db.model('Test', docSchema);
+    const Doc1 = db.model('Test1', doc1Schema);
+    const Doc2 = db.model('Test2', doc2Schema);
+    const Doc3 = db.model('Test3', doc3Schema);
+
+    const doc1 = await Doc1.create({ name: 'test 1' });
+    const doc2 = await Doc2.create({ title: 'test 2' });
+    const doc3 = await Doc3.create({ content: 'test 3' });
+
+    const docD = await Doc.create({
+      refA: doc1._id,
+      refB: doc2._id,
+      refC: doc3._id
+    });
+
+    await docD.populate({
+      path: ['refA', 'refB', 'refC'],
+      ordered: true
+    });
+
+    assert.ok(docD.populated('refA'));
+    assert.ok(docD.populated('refB'));
+    assert.ok(docD.populated('refC'));
+
+    assert.equal(docD.refA.name, 'test 1');
+    assert.equal(docD.refB.title, 'test 2');
+    assert.equal(docD.refC.content, 'test 3');
+  });
+
+  it('handles re-populating map of array of refs (gh-9359)', async function() {
+    const UserSchema = mongoose.Schema({
+      columns: { type: Map, of: [{ type: 'ObjectId', ref: 'Test1' }] }
+    });
+    const CardSchema = mongoose.Schema({
+      title: { type: String },
+      sequence: { type: 'ObjectId', ref: 'Test2' }
+    });
+    const SequenceSchema = mongoose.Schema({
+      foo: { type: String }
+    });
+
+    const Sequence = db.model('Test2', SequenceSchema);
+    const Card = db.model('Test1', CardSchema);
+    const User = db.model('Test', UserSchema);
+
+    const sequence = await Sequence.create({ foo: 'bar' });
+    const card1 = await Card.create({ title: 'card1', sequence });
+    const card2 = await Card.create({ title: 'card2', sequence });
+    const card3 = await Card.create({ title: 'card3' });
+    const card4 = await Card.create({ title: 'card4', sequence });
+    await User.create({
+      columns: { key1: [card1, card2], key2: [card3, card4] }
+    });
+
+    const user = await User.findOne();
+    await user.populate('columns.$*');
+    assert.deepStrictEqual(user.columns.get('key1').map(subdoc => subdoc.title), ['card1', 'card2']);
+    assert.deepStrictEqual(user.columns.get('key2').map(subdoc => subdoc.title), ['card3', 'card4']);
+    await user.populate('columns.$*.sequence');
+    assert.deepStrictEqual(user.columns.get('key1').map(subdoc => subdoc.title), ['card1', 'card2']);
+    assert.deepStrictEqual(user.columns.get('key1').map(subdoc => subdoc.sequence.foo), ['bar', 'bar']);
+    assert.deepStrictEqual(user.columns.get('key2').map(subdoc => subdoc.title), ['card3', 'card4']);
+    assert.deepStrictEqual(user.columns.get('key2').map(subdoc => subdoc.sequence?.foo), [undefined, 'bar']);
+
+  });
 });
diff --git a/test/document.test.js b/test/document.test.js
index 7bef86771d2..755efc34e67 100644
--- a/test/document.test.js
+++ b/test/document.test.js
@@ -12,6 +12,7 @@ const ArraySubdocument = require('../lib/types/arraySubdocument');
 const Query = require('../lib/query');
 const assert = require('assert');
 const idGetter = require('../lib/helpers/schema/idGetter');
+const sinon = require('sinon');
 const util = require('./util');
 const utils = require('../lib/utils');

@@ -7363,6 +7364,27 @@ describe('document', function() {
     assert.strictEqual(obj.subDoc.timestamp, date);
   });

+  it('supports setting values to undefined with strict: false (gh-15192)', async function() {
+    const helloSchema = new mongoose.Schema({
+      name: { type: String, required: true },
+      status: { type: Boolean, required: true },
+      optional: { type: Number }
+    }, { strict: false });
+    const Hello = db.model('Test', helloSchema);
+
+    const obj = new Hello({ name: 'abc', status: true, optional: 1 });
+    const doc = await obj.save();
+
+    doc.set({ optional: undefined });
+
+    assert.ok(doc.isModified());
+
+    await doc.save();
+
+    const { optional } = await Hello.findById(doc._id).orFail();
+    assert.strictEqual(optional, undefined);
+  });
+
   it('handles .set() on doc array within embedded discriminator (gh-7656)', function() {
     const pageElementSchema = new Schema({
       type: { type: String, required: true }
@@ -14275,6 +14297,131 @@ describe('document', function() {

     delete mongoose.Schema.Types.CustomType;
   });
+
+  it('supports schemaFieldsOnly option for toObject() (gh-15258)', async function() {
+    const schema = new Schema({ key: String }, { discriminatorKey: 'key' });
+    const subschema1 = new Schema({ field1: String });
+    const subschema2 = new Schema({ field2: String });
+
+    const Discriminator = db.model('Test', schema);
+    Discriminator.discriminator('type1', subschema1);
+    Discriminator.discriminator('type2', subschema2);
+
+    const doc = await Discriminator.create({
+      key: 'type1',
+      field1: 'test value'
+    });
+
+    await Discriminator.updateOne(
+      { _id: doc._id },
+      {
+        key: 'type2',
+        field2: 'test2'
+      },
+      { overwriteDiscriminatorKey: true }
+    );
+
+    const doc2 = await Discriminator.findById(doc).orFail();
+    assert.strictEqual(doc2.field2, 'test2');
+    assert.strictEqual(doc2.field1, undefined);
+
+    const obj = doc2.toObject();
+    assert.strictEqual(obj.field2, 'test2');
+    assert.strictEqual(obj.field1, 'test value');
+
+    const obj2 = doc2.toObject({ schemaFieldsOnly: true });
+    assert.strictEqual(obj.field2, 'test2');
+    assert.strictEqual(obj2.field1, undefined);
+  });
+
+  it('supports schemaFieldsOnly on nested paths, subdocuments, and arrays (gh-15258)', async function() {
+    const subSchema = new Schema({
+      title: String,
+      description: String
+    }, { _id: false });
+    const taskSchema = new Schema({
+      name: String,
+      details: {
+        dueDate: Date,
+        priority: Number
+      },
+      subtask: subSchema,
+      tasks: [subSchema]
+    });
+    const Task = db.model('Test', taskSchema);
+
+    const doc = await Task.create({
+      _id: '0'.repeat(24),
+      name: 'Test Task',
+      details: {
+        dueDate: new Date('2024-01-01'),
+        priority: 1
+      },
+      subtask: {
+        title: 'Subtask 1',
+        description: 'Test Description'
+      },
+      tasks: [{
+        title: 'Array Task 1',
+        description: 'Array Description 1'
+      }]
+    });
+
+    doc._doc.details.extraField = 'extra';
+    doc._doc.subtask.extraField = 'extra';
+    doc._doc.tasks[0].extraField = 'extra';
+
+    const obj = doc.toObject({ schemaFieldsOnly: true });
+    assert.deepStrictEqual(obj, {
+      name: 'Test Task',
+      details: { dueDate: new Date('2024-01-01T00:00:00.000Z'), priority: 1 },
+      subtask: { title: 'Subtask 1', description: 'Test Description' },
+      tasks: [{ title: 'Array Task 1', description: 'Array Description 1' }],
+      _id: new mongoose.Types.ObjectId('0'.repeat(24)),
+      __v: 0
+    });
+  });
+
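In short, `schemaFieldsOnly` prunes anything the resolved schema does not define, which is mainly useful when `overwriteDiscriminatorKey` updates leave fields from the old discriminator behind:

    doc.toObject(); // stale `field1` from the old discriminator type still present
    doc.toObject({ schemaFieldsOnly: true }); // only paths in the current schema survive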
+  it('handles undoReset() on deep recursive subdocuments (gh-15255)', async function() {
+    const RecursiveSchema = new mongoose.Schema({});
+
+    const s = [RecursiveSchema];
+    RecursiveSchema.path('nested', s);
+
+    const generateRecursiveDocument = (depth, curr = 0) => {
+      return {
+        name: `Document of depth ${curr}`,
+        nested: depth > 0 ? new Array(2).fill().map(() => generateRecursiveDocument(depth - 1, curr + 1)) : [],
+        __v: 5
+      };
+    };
+    const TestModel = db.model('Test', RecursiveSchema);
+    const data = generateRecursiveDocument(10);
+    const doc = new TestModel(data);
+    await doc.save();
+
+    sinon.spy(Document.prototype, '$__undoReset');
+
+    try {
+      const d = await TestModel.findById(doc._id);
+      d.increment();
+      d.data = 'asd';
+      // Force a version error by updating the document directly
+      await TestModel.collection.updateOne({ _id: doc._id }, { $inc: { __v: 1 } });
+      const err = await d.save().then(() => null, err => err);
+      assert.ok(err);
+      assert.equal(err.name, 'VersionError');
+      // `$__undoReset()` should be called 1x per subdoc, plus 1x for top-level doc. Without fix for gh-15255,
+      // this would fail because `$__undoReset()` is called nearly 700k times for only 2046 subdocs
+      assert.strictEqual(Document.prototype.$__undoReset.getCalls().length, d.$getAllSubdocs().length + 1);
+      assert.ok(Document.prototype.$__undoReset.getCalls().find(call => call.thisValue === d), 'top level doc was not reset');
+      for (const subdoc of d.$getAllSubdocs()) {
+        assert.ok(Document.prototype.$__undoReset.getCalls().find(call => call.thisValue === subdoc), `${subdoc.name} was not reset`);
+      }
+    } finally {
+      sinon.restore();
+    }
+  });
 });

 describe('Check if instance function that is supplied in schema option is available', function() {
diff --git a/test/encryptedSchema.test.js b/test/encryptedSchema.test.js
new file mode 100644
index 00000000000..5134d39864e
--- /dev/null
+++ b/test/encryptedSchema.test.js
@@ -0,0 +1,538 @@
+
+'use strict';
+
+const assert = require('assert');
+const start = require('./common');
+const { ObjectId, Decimal128 } = require('../lib/types');
+const { Double, Int32, UUID } = require('bson');
+
+const mongoose = start.mongoose;
+const Schema = mongoose.Schema;
+
+/**
+ *
+ * @param {import('../lib').Schema} object
+ * @param {Array | string} path
+ * @returns
+ */
+function schemaHasEncryptedProperty(schema, path) {
+  path = [path].flat();
+  path = path.join('.');
+
+  return path in schema.encryptedFields;
+}
+
+const KEY_ID = new UUID();
+const algorithm = 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic';
+
+describe('encrypted schema declaration', function() {
+  describe('Tests that fields of valid schema types can be declared as encrypted schemas', function() {
+    const basicSchemaTypes = [
+      { type: String, name: 'string' },
+      { type: Schema.Types.Boolean, name: 'boolean' },
+      { type: Schema.Types.Buffer, name: 'buffer' },
+      { type: Date, name: 'date' },
+      { type: ObjectId, name: 'objectid' },
+      { type: BigInt, name: 'bigint' },
+      { type: Decimal128, name: 'Decimal128' },
+      { type: Int32, name: 'int32' },
+      { type: Double, name: 'double' }
+    ];
+
+    for (const { type, name } of basicSchemaTypes) {
+      describe(`When a schema is instantiated with an encrypted field of type ${name}`, function() {
+        let schema;
+        beforeEach(function() {
+          schema = new Schema({
+            field: {
+              type, encrypt: { keyId: KEY_ID, algorithm }
+            }
+          }, {
+            encryptionType: 'csfle'
+          });
+        });
+
+        it(`Then the schema has an encrypted property of type ${name}`, function() {
'field')); + }); + }); + } + + describe('when a schema is instantiated with a nested encrypted schema', function() { + let schema; + beforeEach(function() { + const encryptedSchema = new Schema({ + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + schema = new Schema({ + field: encryptedSchema + }, { encryptionType: 'csfle' }); + }); + + + it('then the schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])); + }); + }); + + describe('when a schema is instantiated with a nested schema object', function() { + let schema; + beforeEach(function() { + schema = new Schema({ + field: { + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + } + }, { encryptionType: 'csfle' }); + }); + + it('then the schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])); + }); + }); + + describe('when a schema is instantiated as an Array', function() { + let schema; + beforeEach(function() { + schema = new Schema({ + encrypted: { + type: [Number], + encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }); + + it('then the schema has a nested property that is encrypted', function() { + assert.ok(schemaHasEncryptedProperty(schema, 'encrypted')); + }); + }); + + }); + + describe('invalid schema types for encrypted schemas', function() { + describe('When a schema is instantiated with an encrypted field of type Number', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Number, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /Invalid BSON type/); + }); + }); + + describe('When a schema is instantiated with an encrypted field of type Mixed', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Schema.Types.Mixed, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /Invalid BSON type/); + }); + }); + + describe('When a schema is instantiated with a custom schema type plugin', function() { + class Int8 extends mongoose.SchemaType { + constructor(key, options) { + super(key, options, 'Int8'); + } + } + + beforeEach(function() { + // Don't forget to add `Int8` to the type registry + mongoose.Schema.Types.Int8 = Int8; + }); + afterEach(function() { + delete mongoose.Schema.Types.Int8; + }); + + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Int8, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /Invalid BSON type/); + }); + }); + + }); + + describe('options.encryptionType', function() { + describe('when an encrypted schema is instantiated and an encryptionType is not provided', function() { + it('an error is thrown', function() { + assert.throws( + () => { + new Schema({ + field: { + type: String, + encrypt: { keyId: KEY_ID, algorithm } + } + }); + }, /encryptionType must be provided/ + ); + + + }); + }); + + describe('when a nested encrypted schema is provided to schema constructor and the encryption types are different', function() { + it('then an error is thrown', function() { + const innerSchema = new Schema({ + field1: { + type: String, encrypt: { + keyId: KEY_ID, + queries: { type: 'equality' } + } + } + }, { encryptionType: 'csfle' }); + assert.throws(() => { + new Schema({ + field1: 
innerSchema + }, { encryptionType: 'queryableEncryption' }); + }, /encryptionType of a nested schema must match the encryption type of the parent schema/); + }); + }); + }); + + describe('tests for schema mutation methods', function() { + describe('Schema.prototype.add()', function() { + describe('Given a schema with no encrypted fields', function() { + describe('When an encrypted field is added', function() { + it('Then the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: Number + }); + schema.encryptionType('csfle'); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + + describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added', function() { + describe('and the encryption type matches the existing encryption type', function() { + it('Then the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + }); + + describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added with different encryption settings for the same field', function() { + it('The encryption settings for the field are overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: new UUID(), algorithm } } } + ); + assert.notEqual(schema.encryptedFields['name'].keyId, KEY_ID); + }); + + }); + + describe('When an unencrypted field is added for the same field', function() { + it('The field on the schema is overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { field1: String } + ); + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + + }); + }); + + describe('Given a schema', function() { + describe('When multiple encrypted fields are added to the schema in one call to add()', function() { + it('Then all the encrypted fields are added to the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + } + ); + + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + assert.ok(schemaHasEncryptedProperty(schema, ['age'])); + }); + }); + }); + }); + + describe('Schema.prototype.remove()', function() { + describe('Given a schema with one encrypted field', function() { + describe('When the encrypted field is removed', function() { + it('Then the encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove('field1'); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + }); + }); + + describe('Given a schema with multiple 
encrypted fields', function() { + describe('When one encrypted field is removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), true); + }); + }); + + describe('When all encrypted fields are removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1', 'name', 'age']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), false); + }); + }); + }); + + describe('when a nested encrypted property is removed', function() { + it('the encrypted field is removed from the schema', function() { + const schema = new Schema({ + field1: { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + }, { encryptionType: 'csfle' }); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), true); + + schema.remove(['field1.name']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), false); + }); + }); + }); + }); + + describe('tests for schema copying methods', function() { + describe('Schema.prototype.clone()', function() { + describe('Given a schema with encrypted fields', function() { + describe('When the schema is cloned', function() { + it('The resultant schema contains all the same encrypted fields as the original schema', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + }); + it('The encryption type of the cloned schema is the same as the original', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + describe('When the cloned schema is modified', function() { + it('The original is not modified', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + schema2.remove('name'); + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema1, ['name']), true); + }); + }); + }); + }); + }); + + describe('Schema.prototype.pick()', function() { + describe('When pick() is used with only unencrypted fields', function() { + it('Then the resultant schema has none of the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, 
algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is set to the cloned schemas encryptionType', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with nested paths', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', function() { + const originalSchema = new Schema({ + name: { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name.name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name', 'name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + + describe('Schema.prototype.omit()', function() { + describe('When omit() is used with only unencrypted fields', function() { + it('Then the resultant schema has all the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + 
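// Omitting only unencrypted paths must leave every encrypted path intact. +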
assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were not specified to omit()', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with all the encrypted fields', function() { + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + }); +}); diff --git a/test/encrypted_schema.test.js b/test/encrypted_schema.test.js new file mode 100644 index 00000000000..d5712aabe1a --- /dev/null +++ b/test/encrypted_schema.test.js @@ -0,0 +1,1091 @@ + +'use strict'; + +const assert = require('assert'); +const start = require('./common'); +const { ObjectId, Decimal128 } = require('../lib/types'); +const { Double, Int32, UUID } = require('bson'); + +const mongoose = start.mongoose; +const Schema = mongoose.Schema; + +/** + * Whether the given path is declared as an encrypted path on the schema. + * + * @param {import('../lib').Schema} schema + * @param {Array<string> | string} path + * @returns {boolean} + */ +function schemaHasEncryptedProperty(schema, path) { + path = [path].flat(); + path = path.join('.'); + + return path in schema.encryptedFields; +} + +const KEY_ID = '9fbdace3-4e48-412d-88df-3807e8009522'; +const algorithm = 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'; + +describe('encrypted schema declaration', function() { + describe('schemaMap generation tests', function() { + for (const { type, name, encryptionType, schemaMap, encryptedFields } of primitiveSchemaMapTests()) { + describe(`When a schema is instantiated with an encrypted field of type ${name} for ${encryptionType}`, function() { + let schema; + const encrypt = { + keyId: KEY_ID + }; + encryptionType === 'csfle' && (encrypt.algorithm = algorithm); + +
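// csfle field definitions require an explicit algorithm; the qe fixtures omit it. +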
beforeEach(function() { + schema = new Schema({ + field: { + type, encrypt + } + }, { + encryptionType + }); + }); + + it(`Then the schema has an encrypted property of type ${name}`, function() { + assert.ok(schemaHasEncryptedProperty(schema, 'field')); + }); + + encryptionType === 'csfle' && it('then the generated schemaMap is correct', function() { + assert.deepEqual(schema._buildSchemaMap(), schemaMap); + }); + + encryptionType === 'qe' && it('then the generated encryptedFieldsMap is correct', function() { + assert.deepEqual(schema._buildEncryptedFields(), encryptedFields); + }); + }); + } + }); + + describe('Tests that fields of valid schema types can be declared as encrypted schemas', function() { + const tests = { + 'nested schema for csfle': + { + schemaFactory: () => { + const encryptedSchema = new Schema({ + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + return new Schema({ + field: encryptedSchema + }, { encryptionType: 'csfle' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])), + schemaMap: { + bsonType: 'object', + properties: { + field: { + bsonType: 'object', + properties: { + encrypted: { encrypt: { bsonType: 'string', algorithm, keyId: KEY_ID } } + } + } + } + } + }, + 'nested schema for qe': { + schemaFactory: () => { + const encryptedSchema = new Schema({ + encrypted: { + type: String, encrypt: { keyId: KEY_ID } + } + }, { encryptionType: 'qe' }); + return new Schema({ + field: encryptedSchema + }, { encryptionType: 'qe' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])), + encryptedFields: { + fields: [ + { path: 'field.encrypted', keyId: KEY_ID, bsonType: 'string' } + ] + } + }, + 'nested object for csfle': + { + schemaFactory: () => { + return new Schema({ + field: { + encrypted: { + type: String, encrypt: { keyId: KEY_ID, algorithm } + } + } + }, { encryptionType: 'csfle' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])), + schemaMap: { + bsonType: 'object', + properties: { + field: { + bsonType: 'object', + properties: { + encrypted: { encrypt: { bsonType: 'string', algorithm, keyId: KEY_ID } } + } + } + } + } + }, + 'nested object for qe': { + schemaFactory: () => { + return new Schema({ + field: { + encrypted: { + type: String, encrypt: { keyId: KEY_ID } + } + } + }, { encryptionType: 'qe' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['field', 'encrypted'])), + encryptedFields: { + fields: [ + { path: 'field.encrypted', keyId: KEY_ID, bsonType: 'string' } + ] + } + }, + 'schema with encrypted array for csfle': { + schemaFactory: () => { + return new Schema({ + encrypted: { + type: [Number], + encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['encrypted'])), + schemaMap: { + bsonType: 'object', + properties: { + encrypted: { + encrypt: { + bsonType: 'array', + keyId: KEY_ID, + algorithm + } + } + } + } + }, + 'schema with encrypted array for qe': { + schemaFactory: () => { + return new Schema({ + encrypted: { + type: [Number], + encrypt: { keyId: KEY_ID } + } + }, { encryptionType: 'qe' }); + }, predicate: (schema) => assert.ok(schemaHasEncryptedProperty(schema, ['encrypted'])), + encryptedFields: { + fields: [ + { path: 'encrypted', keyId: KEY_ID, bsonType: 'array' } + ] + } + } + }; + + for (const [description, { 
schemaFactory, predicate, schemaMap, encryptedFields }] of Object.entries(tests)) { + it(description, function() { + const schema = schemaFactory(); + predicate(schema); + + schemaMap && assert.deepEqual(schema._buildSchemaMap(), schemaMap); + encryptedFields && assert.deepEqual(schema._buildEncryptedFields(), encryptedFields); + }); + } + }); + + describe('invalid schema types for encrypted schemas', function() { + describe('When a schema is instantiated with an encrypted field of type Number', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Number, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + describe('When a schema is instantiated with an encrypted field of type Mixed', function() { + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Schema.Types.Mixed, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + describe('When a schema is instantiated with a custom schema type plugin', function() { + class Int8 extends mongoose.SchemaType { + constructor(key, options) { + super(key, options, 'Int8'); + } + } + + beforeEach(function() { + // Don't forget to add `Int8` to the type registry + mongoose.Schema.Types.Int8 = Int8; + }); + afterEach(function() { + delete mongoose.Schema.Types.Int8; + }); + + it('Then an error is thrown', function() { + assert.throws(() => { + new Schema({ + field: { + type: Int8, encrypt: { keyId: KEY_ID, algorithm } + } + }, { encryptionType: 'csfle' }); + }, /unable to determine bson type for field `field`/); + }); + }); + + }); + + describe('options.encryptionType', function() { + describe('when an encrypted schema is instantiated and an encryptionType is not provided', function() { + it('an error is thrown', function() { + assert.throws( + () => { + new Schema({ + field: { + type: String, + encrypt: { keyId: KEY_ID, algorithm } + } + }); + }, /encryptionType must be provided/ + ); + + + }); + }); + + describe('when a nested encrypted schema is provided to schema constructor and the encryption types are different', function() { + it('then an error is thrown', function() { + const innerSchema = new Schema({ + field1: { + type: String, encrypt: { + keyId: KEY_ID, + queries: { type: 'equality' } + } + } + }, { encryptionType: 'csfle' }); + assert.throws(() => { + new Schema({ + field1: innerSchema + }, { encryptionType: 'queryableEncryption' }); + }, /encryptionType of a nested schema must match the encryption type of the parent schema/); + }); + }); + }); + + describe('tests for schema mutation methods', function() { + describe('Schema.prototype.add()', function() { + describe('Given a schema with no encrypted fields', function() { + describe('When an encrypted field is added', function() { + it('Then the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: Number + }); + schema.encryptionType('csfle'); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + + describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added', function() { + describe('and the encryption type matches the existing encryption type', function() { + it('Then 
the encrypted field is added to the encrypted fields for the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + ); + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + }); + }); + }); + }); + + describe('Given a schema with an encrypted field', function() { + describe('when an encrypted field is added with different encryption settings for the same field', function() { + it('The encryption settings for the field are overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { name: { type: String, encrypt: { keyId: new UUID(), algorithm } } } + ); + assert.notEqual(schema.encryptedFields['name'].keyId, KEY_ID); + }); + + }); + + describe('When an unencrypted field is added for the same field', function() { + it('The field on the schema is overridden', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { field1: String } + ); + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + + }); + }); + + describe('Given a schema', function() { + describe('When multiple encrypted fields are added to the schema in one call to add()', function() { + it('Then all the encrypted fields are added to the schema', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.add( + { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + } + ); + + assert.ok(schemaHasEncryptedProperty(schema, ['name'])); + assert.ok(schemaHasEncryptedProperty(schema, ['age'])); + }); + }); + }); + }); + + describe('Schema.prototype.remove()', function() { + describe('Given a schema with one encrypted field', function() { + describe('When the encrypted field is removed', function() { + it('Then the encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove('field1'); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + }); + }); + }); + + describe('Given a schema with multiple encrypted fields', function() { + describe('When one encrypted field is removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), true); + }); + }); + + describe('When all encrypted fields are removed', function() { + it('The encrypted fields on the schema does not contain the removed field', function() { + const schema = new Schema({ + field1: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + name: { type: String, encrypt: { 
keyId: KEY_ID, algorithm } }, + age: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, { encryptionType: 'csfle' }); + schema.remove(['field1', 'name', 'age']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema, ['age']), false); + }); + }); + }); + + describe('when a nested encrypted property is removed', function() { + it('the encrypted field is removed from the schema', function() { + const schema = new Schema({ + field1: { name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } } + }, { encryptionType: 'csfle' }); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), true); + + schema.remove(['field1.name']); + + assert.equal(schemaHasEncryptedProperty(schema, ['field1.name']), false); + }); + }); + }); + }); + + describe('tests for schema copying methods', function() { + describe('Schema.prototype.clone()', function() { + describe('Given a schema with encrypted fields', function() { + describe('When the schema is cloned', function() { + it('The resultant schema contains all the same encrypted fields as the original schema', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + }); + it('The encryption type of the cloned schema is the same as the original', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + describe('When the cloned schema is modified', function() { + it('The original is not modified', function() { + const schema1 = new Schema({ name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } }, { encryptionType: 'csfle' }); + const schema2 = schema1.clone(); + schema2.remove('name'); + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema1, ['name']), true); + }); + }); + }); + }); + }); + + describe('Schema.prototype.pick()', function() { + describe('When pick() is used with only unencrypted fields', function() { + it('Then the resultant schema has none of the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is set to the cloned schemas encryptionType', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', 
function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When pick() is used with nested paths', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were specified to pick().', function() { + const originalSchema = new Schema({ + name: { + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } } + }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name.name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name', 'name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), false); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.pick(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + + describe('Schema.prototype.omit()', function() { + describe('When omit() is used with only unencrypted fields', function() { + it('Then the resultant schema has all the original schema’s encrypted fields', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), true); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name1', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with some unencrypted fields', function() { + it('Then the resultant schema has the encrypted fields of the original schema that were not specified to omit()', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, {
encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schemaHasEncryptedProperty(schema2, ['name']), false); + assert.equal(schemaHasEncryptedProperty(schema2, ['age']), true); + }); + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age1']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + + describe('When omit() is used with all the encrypted fields', function() { + it('Then the encryption type is the same as the original schema’s encryption type', function() { + const originalSchema = new Schema({ + name: { type: String, encrypt: { keyId: KEY_ID, algorithm } }, + age: { type: Int32, encrypt: { keyId: KEY_ID, algorithm } }, + name1: String, + age1: Int32 + }, { encryptionType: 'csfle' }); + + const schema2 = originalSchema.omit(['name', 'age']); + + assert.equal(schema2.encryptionType(), 'csfle'); + }); + }); + }); + }); +}); + +function primitiveSchemaMapTests() { + return [ + { + name: 'string', + type: String, + encryptionType: 'csfle', + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'string' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'string', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'string', + type: String, + encryptionType: 'qe', + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'string' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'string', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'boolean', + type: Schema.Types.Boolean, + encryptionType: 'csfle', + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'bool' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'bool', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'boolean', + encryptionType: 'qe', + type: Schema.Types.Boolean, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'bool' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'bool', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'buffer', + encryptionType: 'csfle', + type: Schema.Types.Buffer, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'binData' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'binData', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'buffer', + 
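// qe variant of the buffer fixture: same binData mapping, but no algorithm in the expected output. +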
encryptionType: 'qe', + type: Schema.Types.Buffer, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'binData' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'binData', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'date', + encryptionType: 'csfle', + type: Date, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'date' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'date', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'date', + encryptionType: 'qe', + type: Date, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'date' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'date', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'objectid', + encryptionType: 'csfle', + type: ObjectId, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'objectId' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'objectId', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'objectid', + encryptionType: 'qe', + type: ObjectId, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'objectId' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'objectId', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'bigint', + encryptionType: 'csfle', + type: BigInt, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'long' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'long', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'bigint', + encryptionType: 'qe', + type: BigInt, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'long' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'long', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'Decimal128', + encryptionType: 'csfle', + type: Decimal128, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'decimal' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'decimal', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'Decimal128', + encryptionType: 'qe', + type: Decimal128, + schemaMap: { + bsonType: 'object', + properties: { + field: { 
+ encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'decimal' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'decimal', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'int32', + encryptionType: 'csfle', + type: Int32, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'int' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'int', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'int32', + encryptionType: 'qe', + type: Int32, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'int' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'int', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + }, + { + name: 'double', + encryptionType: 'csfle', + type: Double, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic', + bsonType: 'double' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'double', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' + } + ] + } + }, + { + name: 'double', + encryptionType: 'qe', + type: Double, + schemaMap: { + bsonType: 'object', + properties: { + field: { + encrypt: { + keyId: '9fbdace3-4e48-412d-88df-3807e8009522', + bsonType: 'double' + } + } + } + }, + encryptedFields: { + fields: [ + { + path: 'field', + bsonType: 'double', + keyId: '9fbdace3-4e48-412d-88df-3807e8009522' + } + ] + } + } + ]; +} diff --git a/test/encryption/encryption.test.js b/test/encryption/encryption.test.js index a3b562e80aa..d78fbfc4a82 100644 --- a/test/encryption/encryption.test.js +++ b/test/encryption/encryption.test.js @@ -1,14 +1,25 @@ 'use strict'; const assert = require('assert'); -const mongodb = require('mongodb'); +const mdb = require('mongodb'); const fs = require('fs'); const isBsonType = require('../../lib/helpers/isBsonType'); +const { Schema, createConnection } = require('../../lib'); +const { ObjectId, Double, Int32, Decimal128 } = require('bson'); const LOCAL_KEY = Buffer.from('Mng0NCt4ZHVUYUJCa1kxNkVyNUR1QURhZ2h2UzR2d2RrZzh0cFBwM3R6NmdWMDFBMUN3YkQ5aXRRMkhGRGdQV09wOGVNYUMxT2k3NjZKelhaQmRCZGJkTXVyZG9uSjFk', 'base64'); -describe('ci', () => { +/** + * Asserts that the given property was stored as an encrypted BSON Binary (subtype 6). + * @param {object} object + * @param {string} property + */ +function isEncryptedValue(object, property) { + const value = object[property]; + assert.ok(isBsonType(value, 'Binary'), `auto encryption for property ${property} failed: not a BSON binary.`); + assert.ok(value.sub_type === 6, `auto encryption for property ${property} failed: not subtype 6.`); +} +describe('ci', () => { const cachedUri = process.env.MONGOOSE_TEST_URI; const cachedLib = process.env.CRYPT_SHARED_LIB_PATH; @@ -17,6 +27,8 @@ describe('ci', () => { const file = fs.readFileSync(cwd + '/data/mo-expansion.yml', { encoding: 'utf-8' }).trim().split('\n'); const regex = /^(?<key>.*): "(?<value>.*)"$/; const variables = file.map((line) => regex.exec(line.trim()).groups).reduce((acc, { key, value }) => ({ ...acc, [key]: value }), {}); + console.log('File contents', file); +
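// Surface the parsed expansion values in the CI logs so configuration failures are easier to debug. +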
console.log('Variables', variables); process.env.CRYPT_SHARED_LIB_PATH = variables.CRYPT_SHARED_LIB_PATH; process.env.MONGOOSE_TEST_URI = variables.MONGODB_URI; }); @@ -29,87 +41,750 @@ describe('ci', () => { describe('environmental variables', () => { it('MONGOOSE_TEST_URI is set', async function() { const uri = process.env.MONGOOSE_TEST_URI; + console.log('MONGOOSE_TEST_URI=', uri); assert.ok(uri); }); it('CRYPT_SHARED_LIB_PATH is set', async function() { const shared_library_path = process.env.CRYPT_SHARED_LIB_PATH; + console.log('CRYPT_SHARED_LIB_PATH=', shared_library_path); assert.ok(shared_library_path); }); }); - describe('basic integration', () => { - let keyVaultClient; - let dataKey; - let encryptedClient; - let unencryptedClient; - - beforeEach(async function() { - keyVaultClient = new mongodb.MongoClient(process.env.MONGOOSE_TEST_URI); - await keyVaultClient.connect(); - await keyVaultClient.db('keyvault').collection('datakeys'); - const clientEncryption = new mongodb.ClientEncryption(keyVaultClient, { - keyVaultNamespace: 'keyvault.datakeys', - kmsProviders: { local: { key: LOCAL_KEY } } + let keyId, keyId2, keyId3; + let utilClient; + + beforeEach(async function() { + const keyVaultClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + await keyVaultClient.connect(); + await keyVaultClient.db('keyvault').collection('datakeys'); + const clientEncryption = new mdb.ClientEncryption(keyVaultClient, { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } } + }); + keyId = await clientEncryption.createDataKey('local'); + keyId2 = await clientEncryption.createDataKey('local'); + keyId3 = await clientEncryption.createDataKey('local'); + await keyVaultClient.close(); + + utilClient = new mdb.MongoClient(process.env.MONGOOSE_TEST_URI); + }); + + afterEach(async function() { + await utilClient.db('db').dropDatabase({ + w: 'majority' + }); + await utilClient.close(); + }); + + describe('Tests that fields of valid schema types can be declared as encrypted schemas', function() { + const algorithm = 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'; + let connection; + let schema; + let model; + + const basicSchemaTypes = [ + { type: String, name: 'string', input: 3, expected: 3 }, + { type: Schema.Types.Boolean, name: 'boolean', input: true, expected: true }, + { type: Schema.Types.Buffer, name: 'buffer', input: Buffer.from([1, 2, 3]) }, + { type: Date, name: 'date', input: new Date(12, 12, 2012), expected: new Date(12, 12, 2012) }, + { type: ObjectId, name: 'objectid', input: new ObjectId() }, + { type: BigInt, name: 'bigint', input: 3n }, + { type: Decimal128, name: 'Decimal128', input: new Decimal128('1.5') }, + { type: Int32, name: 'int32', input: new Int32(5), expected: 5 }, + { type: Double, name: 'double', input: new Double(1.5) } + ]; + + for (const { type, name, input, expected } of basicSchemaTypes) { + this.afterEach(async function() { + await connection?.close(); }); - dataKey = await clientEncryption.createDataKey('local'); - encryptedClient = new mongodb.MongoClient( - process.env.MONGOOSE_TEST_URI, - { - autoEncryption: { - keyVaultNamespace: 'keyvault.datakeys', - kmsProviders: { local: { key: LOCAL_KEY } }, - schemaMap: { - 'db.coll': { - bsonType: 'object', - encryptMetadata: { - keyId: [dataKey] - }, - properties: { - a: { - encrypt: { - bsonType: 'int', - algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random', - keyId: [dataKey] - } + // eslint-disable-next-line no-inner-declarations + async function test() { + const [{ _id }] = await 
model.insertMany([{ field: input }]); + const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.field, 'Binary')); + assert.ok(encryptedDoc.field.sub_type === 6); + + const doc = await model.findOne({ _id }); + if (Buffer.isBuffer(input)) { + // mongoose's Buffer does not support deep equality - instead use the Buffer.equals method. + assert.ok(doc.field.equals(input)); + } else { + assert.deepEqual(doc.field, expected ?? input); + } + } + + describe('CSFLE', function() { + beforeEach(async function() { + schema = new Schema({ + field: { + type, encrypt: { keyId: [keyId], algorithm } + } + }, { + encryptionType: 'csfle' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + }); + + it(`${name} encrypts and decrypts`, test); + }); + + describe('queryableEncryption', function() { + beforeEach(async function() { + schema = new Schema({ + field: { + type, encrypt: { keyId: keyId } + } + }, { + encryptionType: 'queryableEncryption' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + }); + + it(`${name} encrypts and decrypts`, test); + }); + } + + describe('nested object schemas', function() { + const tests = { + 'nested object schemas for CSFLE': { + modelFactory: () => { + const schema = new Schema({ + a: { + b: { + c: { + type: String, + encrypt: { keyId: [keyId], algorithm } + } + } + } + }, { + encryptionType: 'csfle' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + + } + }, + 'nested object schemas for QE': { + modelFactory: () => { + const schema = new Schema({ + a: { + b: { + c: { + type: String, + encrypt: { keyId: keyId } } } } - }, + }, { + encryptionType: 'queryableEncryption' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + + } + }, + 'nested schemas for csfle': { + modelFactory: () => { + const nestedSchema = new Schema({ + b: { + c: { + type: String, + encrypt: { keyId: [keyId], algorithm } + } + } + }, { + encryptionType: 'csfle' + }); + + const schema = new Schema({ + a: nestedSchema + }, { + encryptionType: 'csfle' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + + } + }, + 'nested schemas for QE': { + modelFactory: () => { + const nestedSchema = new Schema({ + b: { + c: { + type: String, + encrypt: { keyId: keyId } + } + } + }, { + encryptionType: 'queryableEncryption' + }); + const schema = new Schema({ + a: nestedSchema + }, { + encryptionType: 'queryableEncryption' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + + } + } + }; + + for (const [description, { modelFactory }] of Object.entries(tests)) { + describe(description, function() { + it('encrypts and decrypts', async function() { + const { 
model } = modelFactory(); + + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + + const [{ _id }] = await model.insertMany([{ a: { b: { c: 'hello' } } }]); + const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.a.b.c, 'Binary')); + assert.ok(encryptedDoc.a.b.c.sub_type === 6); + + const doc = await model.findOne({ _id }); + assert.deepEqual(doc.a.b.c, 'hello'); + }); + }); + } + }); + + describe('array encrypted fields', function() { + const tests = { + 'array fields for CSFLE': { + modelFactory: () => { + const schema = new Schema({ + a: { + type: [Int32], + encrypt: { + keyId: [keyId], + algorithm + } + } + }, { + encryptionType: 'csfle' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + } + }, + 'array field for QE': { + modelFactory: () => { + const schema = new Schema({ + a: { + type: [Int32], + encrypt: { + keyId + } + } + }, { + encryptionType: 'queryableEncryption' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + } + } + }; + + for (const [description, { modelFactory }] of Object.entries(tests)) { + describe(description, function() { + it('encrypts and decrypts', async function() { + const { model } = modelFactory(); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + + const [{ _id }] = await model.insertMany([{ a: [new Int32(3)] }]); + const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.a, 'Binary')); + assert.ok(encryptedDoc.a.sub_type === 6); + + const doc = await model.findOne({ _id }); + assert.deepEqual(doc.a, [3]); + }); + }); + } + }); + + describe('multiple encrypted fields in a model', function() { + const tests = { + 'multiple fields in a schema for CSFLE': { + modelFactory: () => { + const encrypt = { + keyId: [keyId], + algorithm + }; + + const schema = new Schema({ + a: { + type: String, + encrypt + }, + b: { + type: BigInt + }, + c: { + d: { + type: String, + encrypt + } + } + }, { + encryptionType: 'csfle' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + } + }, + 'multiple fields in a schema for QE': { + modelFactory: () => { + const schema = new Schema({ + a: { + type: String, + encrypt: { + keyId + } + }, + b: { + type: BigInt + }, + c: { + d: { + type: String, + encrypt: { + keyId: keyId2 + } + } + } + }, { + encryptionType: 'queryableEncryption' + }); + + connection = createConnection(); + model = connection.model('Schema', schema); + return { model }; + } + } + }; + + for (const [description, { modelFactory }] of Object.entries(tests)) { + describe(description, function() { + it('encrypts and decrypts', async function() { + const { model } = modelFactory(); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + 
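// Require the crypt_shared library downloaded in CI instead of falling back to mongocryptd. +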
extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + + const [{ _id }] = await model.insertMany([{ a: 'hello', b: 1n, c: { d: 'world' } }]); + const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.a, 'Binary')); + assert.ok(encryptedDoc.a.sub_type === 6); + assert.ok(typeof encryptedDoc.b === 'number'); + assert.ok(isBsonType(encryptedDoc.c.d, 'Binary')); + assert.ok(encryptedDoc.c.d.sub_type === 6); + + const doc = await model.findOne({ _id }, {}); + assert.deepEqual(doc.a, 'hello'); + assert.deepEqual(doc.b, 1n); + assert.deepEqual(doc.c, { d: 'world' }); + }); + }); + } + }); + + describe('multiple schemas', function() { + const tests = { + 'multiple schemas for CSFLE': { + modelFactory: () => { + connection = createConnection(); + const encrypt = { + keyId: [keyId], + algorithm + }; + const model1 = connection.model('Model1', new Schema({ + a: { + type: String, + encrypt + } + }, { + encryptionType: 'csfle' + })); + const model2 = connection.model('Model2', new Schema({ + b: { + type: String, + encrypt + } + }, { + encryptionType: 'csfle' + })); + + return { model1, model2 }; + } + }, + 'multiple schemas for QE': { + modelFactory: () => { + connection = createConnection(); + const model1 = connection.model('Model1', new Schema({ + a: { + type: String, + encrypt: { + keyId + } + } + }, { + encryptionType: 'queryableEncryption' + })); + const model2 = connection.model('Model2', new Schema({ + b: { + type: String, + encrypt: { + keyId + } + } + }, { + encryptionType: 'queryableEncryption' + })); + + return { model1, model2 }; + } + } + }; + + for (const [description, { modelFactory }] of Object.entries(tests)) { + describe(description, function() { + it('encrypts and decrypts', async function() { + const { model1, model2 } = modelFactory(); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + kmsProviders: { local: { key: LOCAL_KEY } }, + extraOptions: { + cryptdSharedLibRequired: true, + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + + { + const [{ _id }] = await model1.insertMany([{ a: 'hello' }]); + const encryptedDoc = await utilClient.db('db').collection('model1').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.a, 'Binary')); + assert.ok(encryptedDoc.a.sub_type === 6); + + const doc = await model1.findOne({ _id }); + assert.deepEqual(doc.a, 'hello'); + } + + { + const [{ _id }] = await model2.insertMany([{ b: 'world' }]); + const encryptedDoc = await utilClient.db('db').collection('model2').findOne({ _id }); + + assert.ok(isBsonType(encryptedDoc.b, 'Binary')); + assert.ok(encryptedDoc.b.sub_type === 6); + + const doc = await model2.findOne({ _id }); + assert.deepEqual(doc.b, 'world'); + } + }); + }); + } + }); + + describe('CSFLE and QE schemas on the same connection', function() { + it('encrypts and decrypts', async function() { + connection = createConnection(); + const model1 = connection.model('Model1', new Schema({ + a: { + type: String, + encrypt: { + keyId + } + } + }, { + encryptionType: 'queryableEncryption' + })); + const model2 = connection.model('Model2', new Schema({ + b: { + type: String, + encrypt: { + keyId: [keyId], + algorithm + } + } + }, { + encryptionType: 'csfle' + })); + await connection.openUri(process.env.MONGOOSE_TEST_URI, { + dbName: 'db', autoEncryption: { + keyVaultNamespace: 'keyvault.datakeys', + 
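// One autoEncryption config covers both models; the per-collection schemaMap or encryptedFieldsMap is derived from each schema's encryptionType. +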
+          kmsProviders: { local: { key: LOCAL_KEY } }, extraOptions: { cryptdSharedLibRequired: true, cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH } }
+      });
+
+      {
+        const [{ _id }] = await model1.insertMany([{ a: 'hello' }]);
+        const encryptedDoc = await utilClient.db('db').collection('model1').findOne({ _id });
+
+        assert.ok(isBsonType(encryptedDoc.a, 'Binary'));
+        assert.ok(encryptedDoc.a.sub_type === 6);
+
+        const doc = await model1.findOne({ _id });
+        assert.deepEqual(doc.a, 'hello');
      }
-    );
-    unencryptedClient = new mongodb.MongoClient(process.env.MONGOOSE_TEST_URI);
-  });
+      {
+        const [{ _id }] = await model2.insertMany([{ b: 'world' }]);
+        const encryptedDoc = await utilClient.db('db').collection('model2').findOne({ _id });
+
+        assert.ok(isBsonType(encryptedDoc.b, 'Binary'));
+        assert.ok(encryptedDoc.b.sub_type === 6);
-  afterEach(async function() {
-    await keyVaultClient.close();
-    await encryptedClient.close();
-    await unencryptedClient.close();
+        const doc = await model2.findOne({ _id });
+        assert.deepEqual(doc.b, 'world');
+      }
+    });
  });
-  it('ci set-up should support basic mongodb auto-encryption integration', async() => {
-    await encryptedClient.connect();
-    const { insertedId } = await encryptedClient.db('db').collection('coll').insertOne({ a: 1 });
+  describe('Models with discriminators', function() {
+    let discrim1, discrim2, model;
+
+    describe('csfle', function() {
+      beforeEach(async function() {
+        connection = createConnection();
+
+        const schema = new Schema({
+          name: {
+            type: String, encrypt: { keyId: [keyId], algorithm }
+          }
+        }, {
+          encryptionType: 'csfle'
+        });
+        model = connection.model('Schema', schema);
+        discrim1 = model.discriminator('Test', new Schema({
+          age: {
+            type: Int32, encrypt: { keyId: [keyId], algorithm }
+          }
+        }, {
+          encryptionType: 'csfle'
+        }));
+
+        discrim2 = model.discriminator('Test2', new Schema({
+          dob: {
+            type: Int32, encrypt: { keyId: [keyId], algorithm }
+          }
+        }, {
+          encryptionType: 'csfle'
+        }));
+
+
+        await connection.openUri(process.env.MONGOOSE_TEST_URI, {
+          dbName: 'db', autoEncryption: {
+            keyVaultNamespace: 'keyvault.datakeys',
+            kmsProviders: { local: { key: LOCAL_KEY } },
+            extraOptions: {
+              cryptdSharedLibRequired: true,
+              cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH
+            }
+          }
+        });
+      });
+      it('encrypts', async function() {
+        {
+          const doc = new discrim1({ name: 'bailey', age: 32 });
+          await doc.save();
+
+          const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id: doc._id });
+
+          isEncryptedValue(encryptedDoc, 'age');
+        }
+
+        {
+          const doc = new discrim2({ name: 'bailey', dob: 32 });
+          await doc.save();
+
+          const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id: doc._id });
+
+          isEncryptedValue(encryptedDoc, 'dob');
+        }
+      });
+
+      it('decrypts', async function() {
+        {
+          const doc = new discrim1({ name: 'bailey', age: 32 });
+          await doc.save();
+
+          const decryptedDoc = await discrim1.findOne({ _id: doc._id });
+
+          assert.equal(decryptedDoc.age, 32);
+        }
+
+        {
+          const doc = new discrim2({ name: 'bailey', dob: 32 });
+          await doc.save();
+
+          const decryptedDoc = await discrim2.findOne({ _id: doc._id });
+
+          assert.equal(decryptedDoc.dob, 32);
+        }
+      });
+    });
+
-    // client not configured with autoEncryption, returns a encrypted binary type, meaning that encryption succeeded
-    const encryptedResult = await unencryptedClient.db('db').collection('coll').findOne({ _id: insertedId });
+    describe('queryableEncryption', function() {
+      beforeEach(async function() {
+        connection = createConnection();
-    assert.ok(encryptedResult);
-    assert.ok(encryptedResult.a);
-    assert.ok(isBsonType(encryptedResult.a, 'Binary'));
-    assert.ok(encryptedResult.a.sub_type === 6);
+        const schema = new Schema({
+          name: {
+            type: String, encrypt: { keyId }
+          }
+        }, {
+          encryptionType: 'queryableEncryption'
+        });
+        model = connection.model('Schema', schema);
+        discrim1 = model.discriminator('Test', new Schema({
+          age: {
+            type: Int32, encrypt: { keyId: keyId2 }
+          }
+        }, {
+          encryptionType: 'queryableEncryption'
+        }));
+
+        discrim2 = model.discriminator('Test2', new Schema({
+          dob: {
+            type: Int32, encrypt: { keyId: keyId3 }
+          }
+        }, {
+          encryptionType: 'queryableEncryption'
+        }));
+
+        await connection.openUri(process.env.MONGOOSE_TEST_URI, {
+          dbName: 'db', autoEncryption: {
+            keyVaultNamespace: 'keyvault.datakeys',
+            kmsProviders: { local: { key: LOCAL_KEY } },
+            extraOptions: {
+              cryptdSharedLibRequired: true,
+              cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH
+            }
+          }
+        });
+      });
+      it('encrypts', async function() {
+        {
+          const doc = new discrim1({ name: 'bailey', age: 32 });
+          await doc.save();
+
+          const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id: doc._id });
+
+          isEncryptedValue(encryptedDoc, 'age');
+        }
+
+        {
+          const doc = new discrim2({ name: 'bailey', dob: 32 });
+          await doc.save();
+
+          const encryptedDoc = await utilClient.db('db').collection('schemas').findOne({ _id: doc._id });
+
+          isEncryptedValue(encryptedDoc, 'dob');
+        }
+      });
+
+      it('decrypts', async function() {
+        {
+          const doc = new discrim1({ name: 'bailey', age: 32 });
+          await doc.save();
+
+          const decryptedDoc = await discrim1.findOne({ _id: doc._id });
+
+          assert.equal(decryptedDoc.age, 32);
+        }
+
+        {
+          const doc = new discrim2({ name: 'bailey', dob: 32 });
+          await doc.save();
+
+          const decryptedDoc = await discrim2.findOne({ _id: doc._id });
+
+          assert.equal(decryptedDoc.dob, 32);
+        }
+      });
+    });
-    // when the encryptedClient runs a find, the original unencrypted value is returned
-    const unencryptedResult = await encryptedClient.db('db').collection('coll').findOne({ _id: insertedId });
-    assert.ok(unencryptedResult);
-    assert.ok(unencryptedResult.a === 1);
    });
  });
});
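For context on the encryption tests above: a minimal sketch of how the new `encryptionType` schema option and per-path `encrypt` option fit together outside the test harness. The URI, the data key id, and the local KMS key are hypothetical placeholders; in the tests they come from the helper setup (`MONGOOSE_TEST_URI`, `keyId`, `LOCAL_KEY`).

    const mongoose = require('mongoose');

    // dataKeyId: BSON UUID of a pre-created data key; localKey: 96-byte local KMS key.
    // Both are placeholders supplied by the caller.
    async function connectWithCsfle(uri, dataKeyId, localKey) {
      // Each encrypted path declares `encrypt`; the schema declares its `encryptionType`.
      const patientSchema = new mongoose.Schema({
        ssn: {
          type: String,
          encrypt: {
            keyId: [dataKeyId],
            algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
          }
        }
      }, { encryptionType: 'csfle' });

      // Auto-encryption options travel with the connection, as in the tests above.
      const connection = mongoose.createConnection();
      await connection.openUri(uri, {
        autoEncryption: {
          keyVaultNamespace: 'keyvault.datakeys',
          kmsProviders: { local: { key: localKey } }
        }
      });
      return connection.model('Patient', patientSchema);
    }

On the wire the encrypted value is stored as BSON Binary subtype 6, which is exactly what the `utilClient` assertions in the tests check.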
diff --git a/test/model.middleware.preposttypes.test.js b/test/model.middleware.preposttypes.test.js
index 952bc901001..93a42f8dc1f 100644
--- a/test/model.middleware.preposttypes.test.js
+++ b/test/model.middleware.preposttypes.test.js
@@ -288,8 +288,8 @@ describe('pre/post hooks, type of this', function() {
     await Doc.findOneAndReplace({}, { data: 'valueRep' }).exec();
     await Doc.findOneAndUpdate({}, { data: 'valueUpd' }).exec();
     await Doc.replaceOne({}, { data: 'value' }).exec();
-    await Doc.updateOne({ data: 'value' }).exec();
-    await Doc.updateMany({ data: 'value' }).exec();
+    await Doc.updateOne({}, { data: 'value' }).exec();
+    await Doc.updateMany({}, { data: 'value' }).exec();
     // MongooseQueryOrDocumentMiddleware, use Query
     await Doc.deleteOne({}).exec();
     await Doc.create({ data: 'value' });
diff --git a/test/model.test.js b/test/model.test.js
index da870125e0d..76ccbf0235d 100644
--- a/test/model.test.js
+++ b/test/model.test.js
@@ -6,6 +6,7 @@
 const sinon = require('sinon');
 const start = require('./common');
+const CastError = require('../lib/error/cast');
 const assert = require('assert');
 const { once } = require('events');
 const random = require('./util').random;
@@ -368,7 +369,7 @@ describe('Model', function() {
     assert.equal(post.get('comments')[0].comments[0].isNew, true);
     post.invalidate('title'); // force error
-    await post.save().catch(() => {});
+    await post.save().catch(() => { });
     assert.equal(post.isNew, true);
     assert.equal(post.get('comments')[0].isNew, true);
     assert.equal(post.get('comments')[0].comments[0].isNew, true);
@@ -2479,7 +2480,7 @@ describe('Model', function() {
     const DefaultErr = db.model('Test', DefaultErrSchema);
-    new DefaultErr().save().catch(() => {});
+    new DefaultErr().save().catch(() => { });
     await new Promise(resolve => {
       DefaultErr.once('error', function(err) {
@@ -3043,7 +3044,7 @@ describe('Model', function() {
     const Location = db.model('Test', LocationSchema);
-    await Location.collection.drop().catch(() => {});
+    await Location.collection.drop().catch(() => { });
     await Location.init();
     await Location.create({
@@ -3512,7 +3513,7 @@ describe('Model', function() {
     listener = null;
     // Change stream may still emit "MongoAPIError: ChangeStream is closed" because change stream
     // may still poll after close.
-    changeStream.on('error', () => {});
+    changeStream.on('error', () => { });
     changeStream.close();
     changeStream = null;
   });
@@ -3664,7 +3665,7 @@ describe('Model', function() {
     // Change stream may still emit "MongoAPIError: ChangeStream is closed" because change stream
     // may still poll after close.
-    changeStream.on('error', () => {});
+    changeStream.on('error', () => { });
     await changeStream.close();
     await db.close();
   });
@@ -3682,7 +3683,7 @@ describe('Model', function() {
     // Change stream may still emit "MongoAPIError: ChangeStream is closed" because change stream
     // may still poll after close.
-    changeStream.on('error', () => {});
+    changeStream.on('error', () => { });
     const close = changeStream.close();
     await db.asPromise();
@@ -3708,7 +3709,7 @@ describe('Model', function() {
     // Change stream may still emit "MongoAPIError: ChangeStream is closed" because change stream
     // may still poll after close.
-    changeStream.on('error', () => {});
+    changeStream.on('error', () => { });
     changeStream.close();
     const closedData = await closed;
@@ -4121,7 +4122,7 @@ describe('Model', function() {
       { ordered: false, throwOnValidationError: true }
     ).then(() => null, err => err);
     assert.ok(err);
-    assert.equal(err.name, 'MongooseBulkWriteError');
+    assert.equal(err.name, 'MongooseBulkWriteError', err.stack);
     assert.equal(err.validationErrors[0].errors['num'].name, 'CastError');
   });
@@ -4174,7 +4175,7 @@ describe('Model', function() {
     assert.strictEqual(r2.testArray[0].nonexistentProp, undefined);
   });
-  it('handles overwriteDiscriminatorKey (gh-15040)', async function() {
+  it('handles overwriteDiscriminatorKey (gh-15218) (gh-15040)', async function() {
     const dSchema1 = new mongoose.Schema({
       field1: String
     });
@@ -4202,7 +4203,7 @@ describe('Model', function() {
     assert.equal(r1.field1, 'field1');
     assert.equal(r1.key, type1Key);
-    const field2 = 'field2';
+    let field2 = 'field2';
     await TestModel.bulkWrite([{
       updateOne: {
         filter: { _id: r1._id },
@@ -4214,7 +4215,13 @@ describe('Model', function() {
       }
     }]);
-    const r2 = await TestModel.findById(r1._id);
+    let r2 = await TestModel.findById(r1._id);
+    assert.equal(r2.key, type2Key);
+    assert.equal(r2.field2, field2);
+
+    field2 = 'field2 updated again';
+    await TestModel.updateOne({ _id: r1._id }, { $set: { key: type2Key, field2 } }, { overwriteDiscriminatorKey: true });
+    r2 = await TestModel.findById(r1._id);
     assert.equal(r2.key, type2Key);
     assert.equal(r2.field2, field2);
   });
@@ -4701,6 +4708,46 @@ describe('Model', function() {
     assert.equal(err.validationErrors[0].path, 'age');
     assert.equal(err.results[0].path, 'age');
   });
+
+  it('bulkWrite should return both write errors and validation errors in error.results (gh-15265)', async function() {
+    const userSchema = new Schema({ _id: Number, age: { type: Number } });
+    const User = db.model('User', userSchema);
+
+    const createdUser = await User.create({ _id: 1, name: 'Test' });
+
+    const err = await User.bulkWrite([
+      {
+        updateOne: {
+          filter: { _id: createdUser._id },
+          update: { $set: { age: 'NaN' } }
+        }
+      },
+      {
+        insertOne: {
+          document: { _id: 3, age: 14 }
+        }
+      },
+      {
+        insertOne: {
+          document: { _id: 1, age: 13 }
+        }
+      },
+      {
+        insertOne: {
+          document: { _id: 1, age: 14 }
+        }
+      }
+    ], { ordered: false, throwOnValidationError: true })
+      .then(() => null)
+      .catch(err => err);
+
+    assert.ok(err);
+    assert.strictEqual(err.mongoose.results.length, 4);
+    assert.ok(err.mongoose.results[0] instanceof CastError);
+    assert.strictEqual(err.mongoose.results[1], null);
+    assert.equal(err.mongoose.results[2].constructor.name, 'WriteError');
+    assert.equal(err.mongoose.results[3].constructor.name, 'WriteError');
+  });
 });
 it('deleteOne with cast error (gh-5323)', async function() {
@@ -5540,7 +5587,7 @@ describe('Model', function() {
     const Model = db.model('User', userSchema);
-    await Model.collection.drop().catch(() => {});
+    await Model.collection.drop().catch(() => { });
     await Model.createCollection();
     const collectionName = Model.collection.name;
@@ -5574,7 +5621,7 @@ describe('Model', function() {
     const Test = db.model('Test', schema, 'Test');
     await Test.init();
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection();
     const collections = await Test.db.db.listCollections().toArray();
@@ -5583,7 +5630,7 @@ describe('Model', function() {
     assert.equal(coll.type, 'timeseries');
     assert.equal(coll.options.timeseries.timeField, 'timestamp');
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
   });
   it('createCollection() enforces expireAfterSeconds (gh-11229)', async function() {
@@ -5604,7 +5651,7 @@ describe('Model', function() {
     const Test = db.model('TestGH11229Var1', schema);
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection({ expireAfterSeconds: 5 });
     const collOptions = await Test.collection.options();
@@ -5632,7 +5679,7 @@ describe('Model', function() {
     const Test = db.model('TestGH11229Var2', schema, 'TestGH11229Var2');
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection({ expires: '5 seconds' });
     const collOptions = await Test.collection.options();
@@ -5660,7 +5707,7 @@ describe('Model', function() {
     const Test = db.model('TestGH11229Var3', schema);
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection();
     const collOptions = await Test.collection.options();
@@ -5688,7 +5735,7 @@ describe('Model', function() {
     const Test = db.model('TestGH11229Var4', schema);
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection();
     const collOptions = await Test.collection.options();
@@ -5716,7 +5763,7 @@ describe('Model', function() {
     const Test = db.model('Test', schema, 'Test');
     await Test.init();
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection();
     const collections = await Test.db.db.listCollections().toArray();
@@ -5725,7 +5772,7 @@ describe('Model', function() {
     assert.deepEqual(coll.options.clusteredIndex.key, { _id: 1 });
     assert.equal(coll.options.clusteredIndex.name, 'clustered test');
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
   });
   it('mongodb actually removes expired documents (gh-11229)', async function() {
@@ -5747,7 +5794,7 @@ describe('Model', function() {
     const Test = db.model('TestMongoDBExpireRemoval', schema);
-    await Test.collection.drop().catch(() => {});
+    await Test.collection.drop().catch(() => { });
     await Test.createCollection({ expireAfterSeconds: 5 });
     await Test.insertMany([
@@ -5845,7 +5892,7 @@ describe('Model', function() {
     const Model = db.model('User', userSchema);
-    await Model.collection.drop().catch(() => {});
+    await Model.collection.drop().catch(() => { });
     await Model.createCollection();
     await Model.createCollection();
@@ -6486,17 +6533,9 @@ describe('Model', function() {
     assert.deepEqual(
       res,
       {
-        result: {
-          ok: 1,
-          writeErrors: [],
-          writeConcernErrors: [],
-          insertedIds: [],
-          nInserted: 0,
-          nUpserted: 0,
-          nMatched: 0,
-          nModified: 0,
-          nRemoved: 0,
-          upserted: []
+        mongoose: {
+          results: [],
+          validationErrors: []
         },
         insertedCount: 0,
         matchedCount: 0,
@@ -6508,7 +6547,20 @@ describe('Model', function() {
         n: 0
       }
     );
-
+    assert.deepEqual(res.result, {
+      ok: 1,
+      writeErrors: [],
+      writeConcernErrors: [],
+      insertedIds: [],
+      nInserted: 0,
+      nUpserted: 0,
+      nMatched: 0,
+      nModified: 0,
+      nRemoved: 0,
+      upserted: []
+    });
+
+    assert.equal(typeof res.getWriteErrorAt, 'function');
   });
   it('Model.bulkWrite(...) does not throw an error with upsert:true, setDefaultsOnInsert: true (gh-9157)', async function() {
@@ -6525,7 +6577,7 @@ describe('Model', function() {
     await User.bulkWrite([
       {
         updateOne: {
-          filter: { },
+          filter: {},
           update: { friends: ['Sam'] },
           upsert: true,
           setDefaultsOnInsert: true
@@ -6548,18 +6600,6 @@ describe('Model', function() {
     assert.deepEqual(
       res,
       {
-        result: {
-          ok: 1,
-          writeErrors: [],
-          writeConcernErrors: [],
-          insertedIds: [],
-          nInserted: 0,
-          nUpserted: 0,
-          nMatched: 0,
-          nModified: 0,
-          nRemoved: 0,
-          upserted: []
-        },
         insertedCount: 0,
         matchedCount: 0,
         modifiedCount: 0,
@@ -6567,9 +6607,30 @@ describe('Model', function() {
         upsertedCount: 0,
         upsertedIds: {},
         insertedIds: {},
-        n: 0
+        n: 0,
+        mongoose: {
+          results: [],
+          validationErrors: []
+        }
       }
     );
+    assert.deepEqual(
+      res.result,
+      {
+        ok: 1,
+        writeErrors: [],
+        writeConcernErrors: [],
+        insertedIds: [],
+        nInserted: 0,
+        nUpserted: 0,
+        nMatched: 0,
+        nModified: 0,
+        nRemoved: 0,
+        upserted: []
+      }
+    );
+
+    assert.equal(typeof res.getWriteErrorAt, 'function');
   });
   it('allows calling `create()` after `bulkWrite()` (gh-9350)', async function() {
@@ -7002,7 +7063,7 @@ describe('Model', function() {
   });
   it('insertMany should throw an error if there were operations that failed validation, ' +
-      'but all operations that passed validation succeeded (gh-14572) (gh-13256)', async function() {
+    'but all operations that passed validation succeeded (gh-14572) (gh-13256)', async function() {
     const userSchema = new Schema({
       age: { type: Number }
     });
@@ -7040,6 +7101,41 @@ describe('Model', function() {
     assert.deepStrictEqual(docs.map(doc => doc.age), [12, 12]);
   });
+
+  it('insertMany should return both write errors and validation errors in error.results (gh-15265)', async function() {
+    const userSchema = new Schema({ _id: Number, age: { type: Number } });
+    const User = db.model('User', userSchema);
+    await User.insertOne({ _id: 1, age: 12 });
+
+    const err = await User.insertMany([
+      { _id: 1, age: 'NaN' },
+      { _id: 3, age: 14 },
+      { _id: 1, age: 13 },
+      { _id: 1, age: 14 }
+    ], { ordered: false }).then(() => null).catch(err => err);
+
+    assert.ok(err);
+    assert.strictEqual(err.results.length, 4);
+    assert.ok(err.results[0] instanceof ValidationError);
+    assert.ok(err.results[1] instanceof User);
+    assert.ok(err.results[2].err);
+    assert.ok(err.results[3].err);
+  });
+
+  it('insertMany should return both write errors and validation errors in error.results with rawResult (gh-15265)', async function() {
+    const userSchema = new Schema({ _id: Number, age: { type: Number } });
+    const User = db.model('User', userSchema);
+
+    const res = await User.insertMany([
+      { _id: 1, age: 'NaN' },
+      { _id: 3, age: 14 }
+    ], { ordered: false, rawResult: true });
+
+    assert.ok(res);
+    assert.strictEqual(res.mongoose.results.length, 2);
+    assert.ok(res.mongoose.results[0] instanceof ValidationError);
+    assert.ok(res.mongoose.results[1] instanceof User);
+  });
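For context, a sketch of consuming the per-operation results asserted above (model, docs, and logging are hypothetical). With `ordered: false`, entry i of the error's `results` lines up with input document i and is either a ValidationError, a hydrated document for a successful insert, or a wrapper carrying the server write error:

    let err = null;
    try {
      await User.insertMany(docs, { ordered: false });
    } catch (e) {
      err = e;
    }
    // err.results[i] corresponds to docs[i]
    for (const [i, result] of err.results.entries()) {
      if (result instanceof mongoose.Error.ValidationError) {
        console.log(i, 'failed client-side validation');
      } else if (result && result.err) {
        console.log(i, 'rejected by the server', result.err);
      } else {
        console.log(i, 'inserted', result._id);
      }
    }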
   it('returns writeResult on success', async() => {
     const userSchema = new Schema({
@@ -8020,7 +8116,7 @@ describe('Model', function() {
     decoratorSchema.loadClass(Decorator);
     // Define discriminated class before model is compiled
-    class Deco1 extends Decorator { whoAmI() { return 'I am Test1'; }}
+    class Deco1 extends Decorator { whoAmI() { return 'I am Test1'; } }
     const deco1Schema = new Schema({});
     deco1Schema.loadClass(Deco1);
     decoratorSchema.discriminator('Test1', deco1Schema);
@@ -8032,7 +8128,7 @@ describe('Model', function() {
     const shopModel = db.model('Test', shopSchema);
     // Define another discriminated class after the model is compiled
-    class Deco2 extends Decorator { whoAmI() { return 'I am Test2'; }}
+    class Deco2 extends Decorator { whoAmI() { return 'I am Test2'; } }
     const deco2Schema = new Schema({});
     deco2Schema.loadClass(Deco2);
     decoratorSchema.discriminator('Test2', deco2Schema);
@@ -8158,7 +8254,7 @@ describe('Model', function() {
   });
   it('insertMany should throw an error if there were operations that failed validation, ' +
-      'but all operations that passed validation succeeded (gh-13256)', async function() {
+    'but all operations that passed validation succeeded (gh-13256)', async function() {
     const userSchema = new Schema({
       age: { type: Number }
     });
@@ -8505,6 +8601,15 @@ describe('Model', function() {
     });
   });
+
+  it('throws error if calling `updateMany()` with no update param (gh-15190)', async function() {
+    const Test = db.model('Test', mongoose.Schema({ foo: String }));
+
+    assert.throws(
+      () => Test.updateMany({ foo: 'bar' }),
+      { message: 'updateMany `update` parameter cannot be nullish' }
+    );
+  });
+
   describe('insertOne() (gh-14843)', function() {
     it('should insert a new document', async function() {
       const userSchema = new Schema({
diff --git a/test/query.test.js b/test/query.test.js
index bca5f706cfd..13b5b677b78 100644
--- a/test/query.test.js
+++ b/test/query.test.js
@@ -1962,7 +1962,7 @@ describe('Query', function() {
   });
   schema.pre('deleteOne', { document: true, query: false }, async function() {
-    await this.constructor.updateOne({ isDeleted: true });
+    await this.updateOne({ isDeleted: true });
     this.$isDeleted(true);
   });
diff --git a/test/schema.number.test.js b/test/schema.number.test.js
index b8f051170b6..99c69ca1540 100644
--- a/test/schema.number.test.js
+++ b/test/schema.number.test.js
@@ -1,5 +1,6 @@
 'use strict';
+const assert = require('assert');
 const start = require('./common');
 const mongoose = start.mongoose;
@@ -18,4 +19,26 @@ describe('SchemaNumber', function() {
       });
     });
   });
+
+  it('allows calling `min()` with no message arg (gh-15236)', async function() {
+    const schema = new Schema({ x: { type: Number } });
+    schema.path('x').min(0);
+
+    const err = await new Promise((resolve) => {
+      schema.path('x').doValidate(-1, err => {
+        resolve(err);
+      });
+    });
+    assert.ok(err);
+    assert.equal(err.message, 'Path `x` (-1) is less than minimum allowed value (0).');
+
+    schema.path('x').min(0, 'Invalid value!');
+
+    const err2 = await new Promise((resolve) => {
+      schema.path('x').doValidate(-1, err => {
+        resolve(err);
+      });
+    });
+    assert.equal(err2.message, 'Invalid value!');
+  });
 });
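A quick sketch of what the gh-15236 test above pins down (schema is hypothetical): the message argument to `min()`/`max()` on number and date paths is optional, and the built-in message is used when it is omitted.

    const schema = new mongoose.Schema({ x: Number });
    schema.path('x').min(0);                   // default: 'Path `x` (-1) is less than minimum allowed value (0).'
    schema.path('x').min(0, 'Invalid value!'); // custom message still supported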
diff --git a/test/schema.test.js b/test/schema.test.js
index 416502b03a5..a012376d28a 100644
--- a/test/schema.test.js
+++ b/test/schema.test.js
@@ -2173,7 +2173,7 @@ describe('schema', function() {
     const keys = Object.keys(SchemaStringOptions.prototype).
       filter(key => key !== 'constructor' && key !== 'populate');
     const functions = Object.keys(Schema.Types.String.prototype).
-      filter(key => ['constructor', 'cast', 'castForQuery', 'checkRequired', 'toJSONSchema'].indexOf(key) === -1);
+      filter(key => ['constructor', 'cast', 'castForQuery', 'checkRequired', 'toJSONSchema', 'autoEncryptionType'].indexOf(key) === -1);
     assert.deepEqual(keys.sort(), functions.sort());
   });
@@ -3889,4 +3889,34 @@ describe('schema', function() {
     assert.throws(() => schema.toJSONSchema(), /unsupported SchemaType to JSON Schema: Mixed/);
   });
 });
+
+  it('path() clears existing child schemas (gh-15253)', async function() {
+    const RecursiveSchema = new mongoose.Schema({
+      data: String
+    });
+
+    const s = [RecursiveSchema];
+    RecursiveSchema.path('nested', s);
+    assert.strictEqual(RecursiveSchema.childSchemas.length, 1);
+    RecursiveSchema.path('nested', s);
+    assert.strictEqual(RecursiveSchema.childSchemas.length, 1);
+    RecursiveSchema.path('nested', s);
+    assert.strictEqual(RecursiveSchema.childSchemas.length, 1);
+    RecursiveSchema.path('nested', s);
+    assert.strictEqual(RecursiveSchema.childSchemas.length, 1);
+
+    const generateRecursiveDocument = (depth, curr = 0) => {
+      return {
+        name: `Document of depth ${curr}`,
+        nested: depth > 0 ? new Array(3).fill().map(() => generateRecursiveDocument(depth - 1, curr + 1)) : [],
+        data: Math.random()
+      };
+    };
+
+    const TestModel = db.model('Test', RecursiveSchema);
+    const data = generateRecursiveDocument(6);
+    const doc = new TestModel(data);
+    await doc.save();
+
+  });
 });
diff --git a/test/schema.uuid.test.js b/test/schema.uuid.test.js
index 77b7b2300fa..e93538f78cf 100644
--- a/test/schema.uuid.test.js
+++ b/test/schema.uuid.test.js
@@ -63,6 +63,8 @@ describe('SchemaUUID', function() {
     const errors = res.errors;
     assert.strictEqual(Object.keys(errors).length, 1);
     assert.ok(errors.x instanceof mongoose.Error.CastError);
+
+    assert.ok(errors.x.reason.message.includes('not a valid UUID string'), errors.x.reason.message);
   });
   it('should work with $in and $nin and $all', async function() {
diff --git a/test/types.map.test.js b/test/types.map.test.js
index c6486e507ab..a61850fa9a6 100644
--- a/test/types.map.test.js
+++ b/test/types.map.test.js
@@ -1150,4 +1150,33 @@ describe('Map', function() {
     const doc = await CarModel.findById(car._id);
     assert.deepStrictEqual(doc.owners.get('abc').toObject(), [{ name: 'Bill' }]);
   });
+
+  it('handles loading and modifying map of document arrays (gh-15196)', async function() {
+    const schema = new Schema({
+      name: { type: String, required: true },
+      test_map: {
+        type: Map,
+        of: [{
+          _id: false,
+          num: { type: Number, required: true },
+          bool: { type: Boolean, required: true }
+        }]
+      }
+    });
+    const Test = db.model('Test', schema);
+
+    let doc1 = new Test({ name: 'name1', test_map: new Map() });
+    await doc1.save();
+
+    doc1 = await Test.findOne({ _id: doc1._id });
+
+    doc1.test_map.set('key1', []);
+    await doc1.save();
+
+    doc1 = await Test.findOne({ _id: doc1._id });
+    assert.deepStrictEqual(doc1.toObject().test_map, new Map([['key1', []]]));
+
+    doc1 = await Test.findOne({ _id: doc1._id }).lean();
+    assert.deepStrictEqual(doc1.test_map, { key1: [] });
+  });
 });
diff --git a/test/types/expressions.test.ts b/test/types/expressions.test.ts
index baa788534df..7ae5ebe1a35 100644
--- a/test/types/expressions.test.ts
+++ b/test/types/expressions.test.ts
@@ -313,3 +313,7 @@ const filterLimit: Expression.Filter = {
     }
   ];
 })();
+
+function gh15209() {
+  const query: PipelineStage[] = [{ $group: { _id: null, median: { $median: { input: '$value', method: 'approximate' } } } }];
+}
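Alongside the gh15209 typing test above, a sketch of `$median` at runtime (model and field names hypothetical; the operator requires MongoDB 7.0+):

    const [stats] = await Model.aggregate([
      { $group: { _id: null, median: { $median: { input: '$value', method: 'approximate' } } } }
    ]);
    console.log(stats.median); // approximate 50th percentile of `value`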
diff --git a/test/types/middleware.test.ts b/test/types/middleware.test.ts
index 96e40ecbe81..308c9e5e08d 100644
--- a/test/types/middleware.test.ts
+++ b/test/types/middleware.test.ts
@@ -200,3 +200,60 @@ function gh13601() {
     expectAssignable(this);
   });
 }
+
+function gh15242() {
+  type PostPersisted = {
+    title: string,
+    postTime: Date
+  };
+
+  type ValidatorThis = DocumentValidatorThis | QueryValidatorThis;
+  type DocumentValidatorThis = HydratedDocument<PostPersisted>;
+  type QueryValidatorThis = Query<PostPersisted, PostPersisted>;
+
+  const PostSchema = new Schema({
+    title: { type: String, required: true },
+    postTime: {
+      type: Date,
+      required: true,
+      validate: {
+        validator: async function(this: ValidatorThis, postTime: Date): Promise<boolean> {
+          return true;
+        }
+      }
+    }
+  });
+
+  type PostRecord = HydratedDocument<PostPersisted>;
+  const PostModel = model('Post', PostSchema);
+}
+
+function gh15242WithVirtuals() {
+  type PostPersisted = {
+    title: string,
+    postTime: Date
+  };
+
+  type ValidatorThis = DocumentValidatorThis | QueryValidatorThis;
+  type DocumentValidatorThis = HydratedDocument<PostPersisted>;
+  type QueryValidatorThis = Query<PostPersisted, PostPersisted>;
+
+  const PostSchema = new Schema({
+    title: { type: String, required: true },
+    postTime: {
+      type: Date,
+      required: true,
+      validate: {
+        validator: async function(this: ValidatorThis, postTime: Date): Promise<boolean> {
+          if (!(this instanceof Query)) {
+            expectType<number>(this.myVirtual);
+          }
+          return true;
+        }
+      }
+    }
+  }, { virtuals: { myVirtual: { get() { return 42; } } } });
+
+  type PostRecord = HydratedDocument<PostPersisted>;
+  const PostModel = model('Post', PostSchema);
+}
diff --git a/test/types/schema.test.ts b/test/types/schema.test.ts
index 13408eaf293..cc575fcb19c 100644
--- a/test/types/schema.test.ts
+++ b/test/types/schema.test.ts
@@ -21,11 +21,9 @@ import {
   Types,
   Query,
   model,
-  ValidateOpts,
-  BufferToBinary
+  ValidateOpts
 } from 'mongoose';
-import { Binary } from 'mongodb';
-import { IsPathRequired } from '../../types/inferschematype';
+import { Binary, BSON } from 'mongodb';
 import { expectType, expectError, expectAssignable } from 'tsd';
 import { ObtainDocumentPathType, ResolvePathType } from '../../types/inferschematype';
@@ -591,6 +589,16 @@ const batchSchema2 = new Schema({ name: String }, { discriminatorKey: 'kind', st
     }
   }
 });
 batchSchema2.discriminator('event', eventSchema2);
+
+function encryptionType() {
+  const keyId = new BSON.UUID();
+  expectError(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'newFakeEncryptionType' }));
+  expectError(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 1 }));
+
+  expectType(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'queryableEncryption' }));
+  expectType(new Schema({ name: { type: String, encrypt: { keyId } } }, { encryptionType: 'csfle' }));
+}
+
 function gh11828() {
   interface IUser {
     name: string;
@@ -1725,3 +1733,24 @@ async function gh12959() {
   const leanDoc = await TestModel.findOne().lean().orFail();
   expectType(leanDoc.__v);
 }
+
+async function gh15236() {
+  const schema = new Schema({
+    myNum: { type: Number }
+  });
+
+  schema.path('myNum').min(0);
+}
+
+function gh15244() {
+  const schema = new Schema({});
+  schema.discriminator('Name', new Schema({}), { value: 'value' });
+}
+
+async function schemaDouble() {
+  const schema = new Schema({ balance: 'Double' });
+  const TestModel = model('Test', schema);
+
+  const doc = await TestModel.findOne().orFail();
+  expectType(doc.balance);
+}
diff --git a/test/types/schemaTypeOptions.test.ts b/test/types/schemaTypeOptions.test.ts
index 3514b01d7e9..9e501322fb5 100644
--- a/test/types/schemaTypeOptions.test.ts
+++ b/test/types/schemaTypeOptions.test.ts
@@ -1,3 +1,4 @@
+import { BSON } from 'mongodb';
 import {
   AnyArray,
   Schema,
@@ -74,3 +75,37 @@ function defaultOptions() {
   expectType<Record<string, any>>(new Schema.Types.Subdocument('none').defaultOptions);
   expectType<Record<string, any>>(new Schema.Types.UUID('none').defaultOptions);
 }
+
+function encrypt() {
+  const uuid = new BSON.UUID();
+  const binary = new BSON.Binary();
+
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' };
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' };
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, algorithm: undefined };
+  new SchemaTypeOptions()['encrypt'] = { keyId: [uuid], algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' };
+
+  // qe + valid queries
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, queries: 'equality' };
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, queries: 'range' };
+  new SchemaTypeOptions()['encrypt'] = { keyId: uuid, queries: undefined };
+
+  // empty object
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({});
+
+  // invalid keyId
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ keyId: 'fakeId' });
+
+  // missing keyId
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ queries: 'equality' });
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' });
+
+  // invalid algorithm
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ keyId: uuid, algorithm: 'SHA_FAKE_ALG' });
+
+  // invalid queries
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ keyId: uuid, queries: 'fakeQueryOption' });
+
+  // invalid input option
+  expectError<SchemaTypeOptions<unknown>['encrypt']>({ keyId: uuid, invalidKey: 'fakeKeyOption' });
+}
diff --git a/types/expressions.d.ts b/types/expressions.d.ts
index 188c377079a..998e5f0dbe1 100644
--- a/types/expressions.d.ts
+++ b/types/expressions.d.ts
@@ -2301,6 +2301,18 @@ declare module 'mongoose' {
     }
   }
+
+  export interface Median {
+    /**
+     * Returns an approximation of the median, the 50th percentile, as a scalar value.
+     *
+     * @see https://www.mongodb.com/docs/v7.0/reference/operator/aggregation/median/
+     */
+    $median: {
+      input: number | Expression,
+      method: 'approximate'
+    }
+  }
+
   export interface StdDevPop {
     /**
      * Calculates the population standard deviation of the input values. Use if the values encompass the entire
@@ -2859,6 +2871,7 @@ declare module 'mongoose' {
     Expression.Locf |
     Expression.Max |
     Expression.MaxN |
+    Expression.Median |
     Expression.Min |
     Expression.MinN |
     Expression.Push |
@@ -2891,6 +2904,7 @@ declare module 'mongoose' {
     Expression.ExpMovingAvg |
     Expression.Integral |
     Expression.Max |
+    Expression.Median |
     Expression.Min |
     Expression.StdDevPop |
     Expression.StdDevSamp |
@@ -2963,6 +2977,7 @@ declare module 'mongoose' {
     Expression.LastN |
     Expression.Max |
     Expression.MaxN |
+    Expression.Median |
     Expression.MergeObjects |
     Expression.Min |
     Expression.MinN |
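To summarize the two `encrypt` shapes exercised in the tsd tests above (the data key id is a hypothetical BSON UUID): CSFLE expects `keyId` as a one-element array plus an `algorithm`, while Queryable Encryption expects a single `keyId` and an optional `queries` kind.

    // CSFLE path options
    const csfleField = {
      type: String,
      encrypt: { keyId: [dataKeyId], algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Random' }
    };

    // Queryable Encryption path options
    const qeField = {
      type: String,
      encrypt: { keyId: dataKeyId, queries: 'equality' }
    };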
diff --git a/types/index.d.ts b/types/index.d.ts
index 32554a048b2..1c3879d476f 100644
--- a/types/index.d.ts
+++ b/types/index.d.ts
@@ -204,30 +204,32 @@ declare module 'mongoose' {
   }
   export interface ToObjectOptions<THydratedDocumentType = HydratedDocument<unknown>> {
-    /** apply all getters (path and virtual getters) */
-    getters?: boolean;
-    /** apply virtual getters (can override getters option) */
-    virtuals?: boolean | string[];
     /** if `options.virtuals = true`, you can set `options.aliases = false` to skip applying aliases. This option is a no-op if `options.virtuals = false`. */
     aliases?: boolean;
+    /** if true, replace any conventionally populated paths with the original id in the output. Has no affect on virtual populated paths. */
+    depopulate?: boolean;
+    /** if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`. */
+    flattenMaps?: boolean;
+    /** if true, convert any ObjectIds in the result to 24 character hex strings. */
+    flattenObjectIds?: boolean;
+    /** apply all getters (path and virtual getters) */
+    getters?: boolean;
     /** remove empty objects (defaults to true) */
     minimize?: boolean;
+    /** If true, the resulting object will only have fields that are defined in the document's schema. By default, `toJSON()` & `toObject()` returns all fields in the underlying document from MongoDB, including ones that are not listed in the schema. */
+    schemaFieldsOnly?: boolean;
     /** if set, mongoose will call this function to allow you to transform the returned object */
     transform?: boolean | ((
       doc: THydratedDocumentType,
       ret: Record<string, any>,
       options: ToObjectOptions
     ) => any);
-    /** if true, replace any conventionally populated paths with the original id in the output. Has no affect on virtual populated paths. */
-    depopulate?: boolean;
-    /** if false, exclude the version key (`__v` by default) from the output */
-    versionKey?: boolean;
-    /** if true, convert Maps to POJOs. Useful if you want to `JSON.stringify()` the result of `toObject()`. */
-    flattenMaps?: boolean;
-    /** if true, convert any ObjectIds in the result to 24 character hex strings. */
-    flattenObjectIds?: boolean;
     /** If true, omits fields that are excluded in this document's projection. Unless you specified a projection, this will omit any field that has `select: false` in the schema. */
     useProjection?: boolean;
+    /** if false, exclude the version key (`__v` by default) from the output */
+    versionKey?: boolean;
+    /** apply virtual getters (can override getters option) */
+    virtuals?: boolean | string[];
   }
   export type DiscriminatorModel = T extends Model
@@ -273,10 +275,10 @@ declare module 'mongoose' {
     /**
      * Create a new schema
     */
-    constructor(definition?: SchemaDefinition<SchemaDefinitionType<RawDocType>, RawDocType> | DocType, options?: SchemaOptions<FlatRecord<DocType>, TInstanceMethods, TQueryHelpers, TStaticMethods, TVirtuals, THydratedDocumentType> | ResolveSchemaOptions<TSchemaOptions>);
+    constructor(definition?: SchemaDefinition<SchemaDefinitionType<RawDocType>, RawDocType, THydratedDocumentType> | DocType, options?: SchemaOptions<FlatRecord<DocType>, TInstanceMethods, TQueryHelpers, TStaticMethods, TVirtuals, THydratedDocumentType> | ResolveSchemaOptions<TSchemaOptions>);
     /** Adds key path / schema type pairs to this schema. */
-    add(obj: SchemaDefinition<SchemaDefinitionType<RawDocType>> | Schema, prefix?: string): this;
+    add(obj: SchemaDefinition<SchemaDefinitionType<RawDocType>, RawDocType> | Schema, prefix?: string): this;
     /**
      * Add an alias for `path`. This means getting or setting the `alias`
@@ -297,7 +299,7 @@ declare module 'mongoose' {
     /** Returns a copy of this schema */
     clone(): T;
-    discriminator(name: string | number, schema: DisSchema): this;
+    discriminator(name: string | number, schema: DisSchema, options?: DiscriminatorOptions): this;
     /** Returns a new schema that has the picked `paths` from this schema. */
     pick(paths: string[], options?: SchemaOptions): T;
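A sketch of the new `schemaFieldsOnly` option documented above (model and id hypothetical): by default `toObject()`/`toJSON()` return every field present in the underlying MongoDB document, while the option restricts output to schema-declared paths.

    const doc = await Model.findById(id); // stored doc may carry legacy fields not in the schema
    const trimmed = doc.toObject({ schemaFieldsOnly: true });
    // `trimmed` contains only the paths declared in Model.schema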
@@ -541,21 +543,21 @@ declare module 'mongoose' {
     ? DateSchemaDefinition : (Function | string);
-  export type SchemaDefinitionProperty = SchemaDefinitionWithBuiltInClass |
-    SchemaTypeOptions |
-    typeof SchemaType |
-    Schema |
-    Schema[] |
-    SchemaTypeOptions, EnforcedDocType>[] |
-    Function[] |
-    SchemaDefinition |
-    SchemaDefinition, EnforcedDocType>[] |
-    typeof Schema.Types.Mixed |
-    MixedSchemaTypeOptions;
-
-  export type SchemaDefinition = T extends undefined
+  export type SchemaDefinitionProperty> = SchemaDefinitionWithBuiltInClass
+    | SchemaTypeOptions
+    | typeof SchemaType
+    | Schema
+    | Schema[]
+    | SchemaTypeOptions, EnforcedDocType, THydratedDocumentType>[]
+    | Function[]
+    | SchemaDefinition
+    | SchemaDefinition, EnforcedDocType, THydratedDocumentType>[]
+    | typeof Schema.Types.Mixed
+    | MixedSchemaTypeOptions;
+
+  export type SchemaDefinition> = T extends undefined
     ? { [path: string]: SchemaDefinitionProperty; }
-    : { [path in keyof T]?: SchemaDefinitionProperty; };
+    : { [path in keyof T]?: SchemaDefinitionProperty; };
   export type AnyArray = T[] | ReadonlyArray;
   export type ExtractMongooseArray = T extends Types.Array ? AnyArray> : T;
@@ -712,47 +714,53 @@ declare module 'mongoose' {
     [K in keyof T]: FlattenProperty;
   };
-  export type BufferToBinaryProperty = T extends Buffer
-    ? mongodb.Binary
-    : T extends Types.DocumentArray
-      ? Types.DocumentArray>
-      : T extends Types.Subdocument
-        ? HydratedSingleSubdocument>
-        : BufferToBinary;
+  export type BufferToBinaryProperty = unknown extends Buffer
+    ? T
+    : T extends Buffer
+      ? mongodb.Binary
+      : T extends Types.DocumentArray
+        ? Types.DocumentArray>
+        : T extends Types.Subdocument
+          ? HydratedSingleSubdocument>
+          : BufferToBinary;
   /**
    * Converts any Buffer properties into mongodb.Binary instances, which is what `lean()` returns
    */
-  export type BufferToBinary = T extends Buffer
-    ? mongodb.Binary
-    : T extends Document
-      ? T
-      : T extends TreatAsPrimitives
-        ? T
-        : T extends Record
-          ? {
-            [K in keyof T]: BufferToBinaryProperty
-          }
-          : T;
+  export type BufferToBinary = unknown extends Buffer
+    ? T
+    : T extends Buffer
+      ? mongodb.Binary
+      : T extends Document
+        ? T
+        : T extends TreatAsPrimitives
+          ? T
+          : T extends Record
+            ? {
+              [K in keyof T]: BufferToBinaryProperty
+            }
+            : T;
   /**
-   * Converts any Buffer properties into { type: 'buffer', data: [1, 2, 3] } format for JSON serialization
-   */
-  export type BufferToJSON = T extends Buffer
-    ? { type: 'buffer', data: number[] }
-    : T extends Document
-      ? T
-      : T extends TreatAsPrimitives
+    * Converts any Buffer properties into { type: 'buffer', data: [1, 2, 3] } format for JSON serialization
+    */
+  export type BufferToJSON = unknown extends Buffer
+    ? T
+    : T extends Buffer
+      ? { type: 'buffer', data: number[] }
+      : T extends Document
        ? T
-      : T extends Record ? {
-        [K in keyof T]: T[K] extends Buffer
-          ? { type: 'buffer', data: number[] }
-          : T[K] extends Types.DocumentArray
-            ? Types.DocumentArray>
-            : T[K] extends Types.Subdocument
-              ? HydratedSingleSubdocument
-              : BufferToBinary;
-      } : T;
+        : T extends TreatAsPrimitives
+          ? T
+          : T extends Record ? {
+            [K in keyof T]: T[K] extends Buffer
+              ? { type: 'buffer', data: number[] }
+              : T[K] extends Types.DocumentArray
+                ? Types.DocumentArray>
+                : T[K] extends Types.Subdocument
+                  ? HydratedSingleSubdocument
+                  : BufferToBinary;
+          } : T;
   /**
    * Converts any ObjectId properties into strings for JSON serialization
@@ -829,7 +837,7 @@ declare module 'mongoose' {
     ? Types.DocumentArray>
     : FlattenMaps;
   export type actualPrimitives = string | boolean | number | bigint | symbol | null | undefined;
-  export type TreatAsPrimitives = actualPrimitives | NativeDate | RegExp | symbol | Error | BigInt | Types.ObjectId | Buffer | Function | mongodb.Binary;
+  export type TreatAsPrimitives = actualPrimitives | NativeDate | RegExp | symbol | Error | BigInt | Types.ObjectId | Buffer | Function | mongodb.Binary | mongodb.ClientSession;
   export type SchemaDefinitionType = T extends Document ? Omit> : T;
diff --git a/types/inferschematype.d.ts b/types/inferschematype.d.ts
index 38f8da1ed65..dac99d09d6c 100644
--- a/types/inferschematype.d.ts
+++ b/types/inferschematype.d.ts
@@ -235,13 +235,17 @@ type IsSchemaTypeFromBuiltinClass = T extends (typeof String)
   ? true
   : T extends Types.Decimal128
     ? true
-    : T extends Buffer
+    : T extends NativeDate
       ? true
-      : T extends NativeDate
+      : T extends (typeof Schema.Types.Mixed)
        ? true
-        : T extends (typeof Schema.Types.Mixed)
+        : IfEquals extends true
          ? true
-          : IfEquals;
+          : unknown extends Buffer
+            ? false
+            : T extends Buffer
+              ? true
+              : false;
 /**
  * @summary Resolve path type by returning the corresponding type.
@@ -308,14 +312,15 @@ type ResolvePathType extends true ? bigint :
   PathValueType extends 'bigint' | 'BigInt' | typeof Schema.Types.BigInt | typeof BigInt ? bigint :
   PathValueType extends 'uuid' | 'UUID' | typeof Schema.Types.UUID ? Buffer :
-  IfEquals extends true ? Buffer :
-  PathValueType extends MapConstructor | 'Map' ? Map> :
-  IfEquals extends true ? Map> :
-  PathValueType extends ArrayConstructor ? any[] :
-  PathValueType extends typeof Schema.Types.Mixed ? any:
-  IfEquals extends true ? any:
-  IfEquals extends true ? any:
-  PathValueType extends typeof SchemaType ? PathValueType['prototype'] :
-  PathValueType extends Record ? ObtainDocumentType :
-  unknown,
+  PathValueType extends 'double' | 'Double' | typeof Schema.Types.Double ? Types.Double :
+  IfEquals extends true ? Buffer :
+  PathValueType extends MapConstructor | 'Map' ? Map> :
+  IfEquals extends true ? Map> :
+  PathValueType extends ArrayConstructor ? any[] :
+  PathValueType extends typeof Schema.Types.Mixed ? any:
+  IfEquals extends true ? any:
+  IfEquals extends true ? any:
+  PathValueType extends typeof SchemaType ? PathValueType['prototype'] :
+  PathValueType extends Record ? ObtainDocumentType :
+  unknown,
 TypeHint>;
diff --git a/types/models.d.ts b/types/models.d.ts
index 8f5a94a059b..4ff5fe83ec3 100644
--- a/types/models.d.ts
+++ b/types/models.d.ts
@@ -308,7 +308,7 @@ declare module 'mongoose' {
   bulkWrite(
     writes: Array>,
     options: MongooseBulkWriteOptions & { ordered: false }
-  ): Promise;
+  ): Promise } }>;
   bulkWrite(
     writes: Array>,
     options?: MongooseBulkWriteOptions
@@ -883,17 +883,20 @@ declare module 'mongoose' {
   /** Creates a `updateMany` query: updates all documents that match `filter` with `update`. */
   updateMany(
-    filter?: RootFilterQuery,
-    update?: UpdateQuery | UpdateWithAggregationPipeline,
+    filter: RootFilterQuery,
+    update: UpdateQuery | UpdateWithAggregationPipeline,
     options?: (mongodb.UpdateOptions & MongooseUpdateQueryOptions) | null
   ): QueryWithHelpers;
   /** Creates a `updateOne` query: updates the first document that matches `filter` with `update`.
    */
   updateOne(
-    filter?: RootFilterQuery,
-    update?: UpdateQuery | UpdateWithAggregationPipeline,
+    filter: RootFilterQuery,
+    update: UpdateQuery | UpdateWithAggregationPipeline,
     options?: (mongodb.UpdateOptions & MongooseUpdateQueryOptions) | null
   ): QueryWithHelpers;
+  updateOne(
+    update: UpdateQuery | UpdateWithAggregationPipeline
+  ): QueryWithHelpers;
   /** Creates a Query, applies the passed conditions, and returns the Query. */
   where(
diff --git a/types/populate.d.ts b/types/populate.d.ts
index 8517c15865c..dac2a248217 100644
--- a/types/populate.d.ts
+++ b/types/populate.d.ts
@@ -39,6 +39,12 @@ declare module 'mongoose' {
     foreignField?: string;
     /** Set to `false` to prevent Mongoose from repopulating paths that are already populated */
     forceRepopulate?: boolean;
+    /**
+     * Set to `true` to execute any populate queries one at a time, as opposed to in parallel.
+     * We recommend setting this option to `true` if using transactions, especially if also populating multiple paths or paths with multiple models.
+     * MongoDB server does **not** support multiple operations in parallel on a single transaction.
+     */
+    ordered?: boolean;
   }
   interface PopulateOption {
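A sketch of the `ordered` populate option documented above (models, paths, and ids hypothetical), for use inside transactions where the server cannot run operations on one session in parallel:

    await connection.transaction(async(session) => {
      const post = await Post.findById(postId).session(session);
      await post.populate([
        { path: 'author', ordered: true },
        { path: 'comments', ordered: true }
      ]);
    });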
diff --git a/types/query.d.ts b/types/query.d.ts
index fbebf1b6467..b9d0f6689fd 100644
--- a/types/query.d.ts
+++ b/types/query.d.ts
@@ -228,14 +228,14 @@ declare module 'mongoose' {
   type MergePopulatePaths> = QueryOp extends QueryOpThatReturnsDocument
     ? ResultType extends null
-    ? ResultType
-    : ResultType extends (infer U)[]
-      ? U extends Document
-        ? HydratedDocument, TDocOverrides, TQueryHelpers>[]
-        : (MergeType)[]
-      : ResultType extends Document
-        ? HydratedDocument, TDocOverrides, TQueryHelpers>
-        : MergeType
+      ? ResultType
+      : ResultType extends (infer U)[]
+        ? U extends Document
+          ? HydratedDocument, TDocOverrides, TQueryHelpers>[]
+          : (MergeType)[]
+        : ResultType extends Document
+          ? HydratedDocument, TDocOverrides, TQueryHelpers>
+          : MergeType
     : MergeType;
   class Query> implements SessionOperation {
@@ -373,8 +373,8 @@ declare module 'mongoose' {
   ): QueryWithHelpers<
     Array<
       DocKey extends keyof WithLevel1NestedPaths
-      ? WithoutUndefined[DocKey]>>
-      : ResultType
+        ? WithoutUndefined[DocKey]>>
+        : ResultType
     >,
     DocType,
     THelpers,
@@ -567,26 +567,26 @@ declare module 'mongoose' {
     val?: boolean | any
   ): QueryWithHelpers<
     ResultType extends null
-    ? GetLeanResultType | null
-    : GetLeanResultType,
+      ? GetLeanResultType | null
+      : GetLeanResultType,
     DocType,
     THelpers,
     RawDocType,
     QueryOp,
     TDocOverrides
-    >;
+  >;
   lean(
     val?: boolean | any
   ): QueryWithHelpers<
     ResultType extends null
-    ? LeanResultType | null
-    : LeanResultType,
+      ? LeanResultType | null
+      : LeanResultType,
     DocType,
     THelpers,
     RawDocType,
    QueryOp,
    TDocOverrides
-    >;
+  >;
   /** Specifies the maximum number of documents the query will return. */
   limit(val: number): this;
@@ -761,12 +761,12 @@ declare module 'mongoose' {
     {},
     ResultType,
     ResultType extends any[]
-    ? ResultType extends HydratedDocument[]
-      ? HydratedDocument[]
-      : RawDocTypeOverride[]
-    : (ResultType extends HydratedDocument
-      ? HydratedDocument
-      : RawDocTypeOverride) | (null extends ResultType ? null : never)
+      ? ResultType extends HydratedDocument[]
+        ? HydratedDocument[]
+        : RawDocTypeOverride[]
+      : (ResultType extends HydratedDocument
+        ? HydratedDocument
+        : RawDocTypeOverride) | (null extends ResultType ? null : never)
   >,
     DocType,
     THelpers,
@@ -850,20 +850,26 @@ declare module 'mongoose' {
    * the `multi` option.
    */
   updateMany(
-    filter?: RootFilterQuery,
-    update?: UpdateQuery | UpdateWithAggregationPipeline,
+    filter: RootFilterQuery,
+    update: UpdateQuery | UpdateWithAggregationPipeline,
     options?: QueryOptions | null
   ): QueryWithHelpers;
+  updateMany(
+    update: UpdateQuery | UpdateWithAggregationPipeline
+  ): QueryWithHelpers;
   /**
    * Declare and/or execute this query as an updateOne() operation. Same as
    * `update()`, except it does not support the `multi` or `overwrite` options.
    */
   updateOne(
-    filter?: RootFilterQuery,
-    update?: UpdateQuery | UpdateWithAggregationPipeline,
+    filter: RootFilterQuery,
+    update: UpdateQuery | UpdateWithAggregationPipeline,
     options?: QueryOptions | null
   ): QueryWithHelpers;
+  updateOne(
+    update: UpdateQuery | UpdateWithAggregationPipeline
+  ): QueryWithHelpers;
   /**
    * Sets the specified number of `mongod` servers, or tag set of `mongod` servers,
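With the tightened overloads above (model and fields hypothetical), `filter` and `update` are both required on the model methods, while a new single-argument overload covers updates chained onto an existing query. Passing only one object no longer silently treats the filter as the update:

    await Test.updateMany({ foo: 'bar' }, { foo: 'baz' });  // filter + update
    Test.updateMany({ foo: 'bar' });                        // throws: update parameter cannot be nullish
    await Test.findOne({ foo: 'bar' }).updateOne({ $set: { foo: 'baz' } }); // query overload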
diff --git a/types/schemaoptions.d.ts b/types/schemaoptions.d.ts
index 4df87a806ea..f661e1643de 100644
--- a/types/schemaoptions.d.ts
+++ b/types/schemaoptions.d.ts
@@ -258,6 +258,11 @@ declare module 'mongoose' {
      * @default false
      */
     overwriteModels?: boolean;
+
+    /**
+     * Required when the schema is encrypted.
+     */
+    encryptionType?: 'csfle' | 'queryableEncryption';
   }
   interface DefaultSchemaOptions {
diff --git a/types/schematypes.d.ts b/types/schematypes.d.ts
index 5f364f0cea4..f24aa4f8595 100644
--- a/types/schematypes.d.ts
+++ b/types/schematypes.d.ts
@@ -1,3 +1,5 @@
+import * as BSON from 'bson';
+
 declare module 'mongoose' {
   /** The Mongoose Date [SchemaType](/docs/schematypes.html). */
@@ -56,7 +58,7 @@ declare module 'mongoose' {
   type DefaultType = T extends Schema.Types.Mixed ? any : Partial>;
-  class SchemaTypeOptions<T, EnforcedDocType = any> {
+  class SchemaTypeOptions<T, EnforcedDocType = any, THydratedDocumentType = HydratedDocument<unknown>> {
     type?:
       T extends string ? StringSchemaDefinition :
       T extends number ? NumberSchemaDefinition :
      T extends boolean ? BooleanSchemaDefinition :
      T extends NativeDate ? DateSchemaDefinition :
      T extends Map ? SchemaDefinition :
      T extends Buffer ? SchemaDefinition :
      T extends Types.ObjectId ? ObjectIdSchemaDefinition :
-      T extends Types.ObjectId[] ? AnyArray | AnyArray> :
-      T extends object[] ? (AnyArray> | AnyArray>> | AnyArray, EnforcedDocType>>) :
-      T extends string[] ? AnyArray | AnyArray> :
-      T extends number[] ? AnyArray | AnyArray> :
-      T extends boolean[] ? AnyArray | AnyArray> :
-      T extends Function[] ? AnyArray | AnyArray, EnforcedDocType>> :
+      T extends Types.ObjectId[] ? AnyArray | AnyArray> :
+      T extends object[] ? (AnyArray> | AnyArray>> | AnyArray, EnforcedDocType, THydratedDocumentType>>) :
+      T extends string[] ? AnyArray | AnyArray> :
+      T extends number[] ? AnyArray | AnyArray> :
+      T extends boolean[] ? AnyArray | AnyArray> :
+      T extends Function[] ? AnyArray | AnyArray, EnforcedDocType, THydratedDocumentType>> :
      T | typeof SchemaType | Schema | SchemaDefinition | Function | AnyArray;
     /** Defines a virtual with the given name that gets/sets this path. */
     alias?: string | string[];
     /** Function or object describing how to validate this schematype. See [validation docs](https://mongoosejs.com/docs/validation.html). */
-    validate?: SchemaValidator<T, EnforcedDocType> | AnyArray<SchemaValidator<T, EnforcedDocType>>;
+    validate?: SchemaValidator<T, EnforcedDocType, THydratedDocumentType> | AnyArray<SchemaValidator<T, EnforcedDocType, THydratedDocumentType>>;
     /** Allows overriding casting logic for this individual path. If a string, the given string overwrites Mongoose's default cast error message. */
     cast?: string |
@@ -207,6 +209,11 @@ declare module 'mongoose' {
     maxlength?: number | [number, string] | readonly [number, string];
     [other: string]: any;
+
+    /**
+     * If set, configures the field for automatic encryption.
+     */
+    encrypt?: EncryptSchemaTypeOptions;
   }
   interface Validator {
@@ -218,6 +225,28 @@ declare module 'mongoose' {
   type ValidatorFunction = (this: DocType, value: any, validatorProperties?: Validator) => any;
+  interface QueryEncryptionEncryptOptions {
+    /** The id of the dataKey to use for encryption. Must be a BSON binary subtype 4 (UUID). */
+    keyId: BSON.Binary;
+
+    /**
+     * Specifies the type of queries that the field can be queried on the encrypted field.
+     */
+    queries?: 'equality' | 'range';
+  }
+
+  interface ClientSideEncryptionEncryptOptions {
+    /** The id of the dataKey to use for encryption. Must be a BSON binary subtype 4 (UUID). */
+    keyId: [BSON.Binary];
+
+    /**
+     * The algorithm to use for encryption.
+     */
+    algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' | 'AEAD_AES_256_CBC_HMAC_SHA_512-Random';
+  }
+
+  export type EncryptSchemaTypeOptions = QueryEncryptionEncryptOptions | ClientSideEncryptionEncryptOptions;
+
   class SchemaType {
     /** SchemaType constructor */
     constructor(path: string, options?: AnyObject, instance?: string);
@@ -389,10 +418,10 @@ declare module 'mongoose' {
     expires(when: number | string): this;
     /** Sets a maximum date validator. */
-    max(value: NativeDate, message: string): this;
+    max(value: NativeDate, message?: string): this;
     /** Sets a minimum date validator. */
-    min(value: NativeDate, message: string): this;
+    min(value: NativeDate, message?: string): this;
     /** Default options for this SchemaType */
     defaultOptions: Record;
@@ -457,10 +486,10 @@ declare module 'mongoose' {
     enum(vals: number[]): this;
     /** Sets a maximum number validator. */
-    max(value: number, message: string): this;
+    max(value: number, message?: string): this;
     /** Sets a minimum number validator. */
-    min(value: number, message: string): this;
+    min(value: number, message?: string): this;
     /** Default options for this SchemaType */
     defaultOptions: Record;
diff --git a/types/types.d.ts b/types/types.d.ts
index 503a9b2c9f2..9c56959182e 100644
--- a/types/types.d.ts
+++ b/types/types.d.ts
@@ -104,5 +104,7 @@ declare module 'mongoose' {
     }
     class UUID extends bson.UUID {}
+
+    class Double extends bson.Double {}
   }
 }
diff --git a/types/validation.d.ts b/types/validation.d.ts
index 3310d954435..3e5f9befdfd 100644
--- a/types/validation.d.ts
+++ b/types/validation.d.ts
@@ -1,6 +1,11 @@
 declare module 'mongoose' {
-  type SchemaValidator = RegExp | [RegExp, string] | Function | [Function, string] | ValidateOpts | ValidateOpts[];
+  type SchemaValidator = RegExp
+    | [RegExp, string]
+    | Function
+    | [Function, string]
+    | ValidateOpts
+    | ValidateOpts[];
   interface ValidatorProps {
     path: string;