diff --git a/.eslintrc.js b/.eslintrc.js
index 1bf13a95d2c..b4d7d1652d9 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -56,29 +56,15 @@ module.exports = {
'@typescript-eslint/ban-types': 'off',
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
- '@typescript-eslint/indent': [
- 'warn',
- 2,
- {
- SwitchCase: 1,
- ignoredNodes: ['TSTypeParameterInstantiation']
- }
- ],
'@typescript-eslint/prefer-optional-chain': 'error',
- '@typescript-eslint/brace-style': 'error',
'@typescript-eslint/no-dupe-class-members': 'error',
'@typescript-eslint/no-redeclare': 'error',
- '@typescript-eslint/type-annotation-spacing': 'error',
- '@typescript-eslint/object-curly-spacing': [
- 'error',
- 'always'
- ],
- '@typescript-eslint/semi': 'error',
- '@typescript-eslint/space-before-function-paren': [
- 'error',
- 'never'
- ],
- '@typescript-eslint/space-infix-ops': 'off'
+ '@typescript-eslint/space-infix-ops': 'off',
+ '@typescript-eslint/no-require-imports': 'off',
+ '@typescript-eslint/no-empty-object-type': 'off',
+ '@typescript-eslint/no-wrapper-object-types': 'off',
+ '@typescript-eslint/no-unused-expressions': 'off',
+ '@typescript-eslint/no-unsafe-function-type': 'off'
}
},
{
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 052b34b1075..5afe4965aeb 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -22,13 +22,13 @@ jobs:
runs-on: ubuntu-20.04
name: Benchmark TypeScript Types
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- run: npm install
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index efa77ce7fa8..d0cd8cca68d 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -21,7 +21,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index 0f9f032ac21..134fbe5ed5e 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -28,12 +28,12 @@ jobs:
runs-on: ubuntu-latest
name: Lint Markdown files
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- run: npm install
@@ -48,13 +48,13 @@ jobs:
runs-on: ubuntu-20.04
name: Test Generating Docs
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- run: git fetch --depth=1 --tags # download all tags for documentation
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- run: npm install
- name: Setup MongoDB
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 7bd4ceea192..f17b792e2b4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -22,12 +22,12 @@ jobs:
runs-on: ubuntu-latest
name: Lint JS-Files
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 18
+ node-version: 22
- run: npm install
@@ -39,9 +39,9 @@ jobs:
strategy:
fail-fast: false
matrix:
- node: [16, 18, 20]
+ node: [16, 18, 20, 22]
os: [ubuntu-20.04, ubuntu-22.04]
- mongodb: [4.4.29, 5.0.26, 6.0.15, 7.0.12]
+ mongodb: [4.4.29, 5.0.26, 6.0.15, 7.0.12, 8.0.0]
include:
- os: ubuntu-20.04 # customize on which matrix the coverage will be collected on
mongodb: 5.0.26
@@ -58,10 +58,10 @@ jobs:
MONGOMS_PREFER_GLOBAL_PATH: 1
FORCE_COLOR: true
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
node-version: ${{ matrix.node }}
@@ -94,11 +94,11 @@ jobs:
MONGOMS_PREFER_GLOBAL_PATH: 1
FORCE_COLOR: true
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- name: Load MongoDB binary cache
id: cache-mongodb-binaries
uses: actions/cache@v4
@@ -106,7 +106,7 @@ jobs:
path: ~/.cache/mongodb-binaries
key: deno-${{ env.MONGOMS_VERSION }}
- name: Setup Deno
- uses: denoland/setup-deno@v1
+ uses: denoland/setup-deno@v2
with:
deno-version: v1.37.x
- run: deno --version
@@ -122,11 +122,11 @@ jobs:
env:
FORCE_COLOR: true
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- run: npm install
- name: Test
run: npm run test-rs
@@ -139,6 +139,6 @@ jobs:
contents: read
steps:
- name: Check out repo
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4
diff --git a/.github/workflows/tidelift-alignment.yml b/.github/workflows/tidelift-alignment.yml
index f79e17a20bf..552493a7cbc 100644
--- a/.github/workflows/tidelift-alignment.yml
+++ b/.github/workflows/tidelift-alignment.yml
@@ -15,11 +15,11 @@ jobs:
if: github.repository == 'Automattic/mongoose'
steps:
- name: Checkout
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- name: Alignment
uses: tidelift/alignment-action@8d7700fe795fc01179c1f9fa05b72a089873027d # main
env:
diff --git a/.github/workflows/tsd.yml b/.github/workflows/tsd.yml
index e5102805477..672bd36229f 100644
--- a/.github/workflows/tsd.yml
+++ b/.github/workflows/tsd.yml
@@ -20,12 +20,12 @@ jobs:
runs-on: ubuntu-latest
name: Lint TS-Files
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 18
+ node-version: 22
- run: npm install
@@ -38,12 +38,12 @@ jobs:
runs-on: ubuntu-latest
name: Test Typescript Types
steps:
- - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup node
- uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
+ uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0
with:
- node-version: 16
+ node-version: 22
- run: npm install
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8ef63a29ffc..4db645d514e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,111 @@
+8.8.3 / 2024-11-26
+==================
+ * fix: disallow using $where in match
+ * perf: cache results from getAllSubdocs() on saveOptions, only loop through known subdoc properties #15055 #15029
+ * fix(model+query): support overwriteDiscriminatorKey for bulkWrite updateOne and updateMany, allow inferring discriminator key from update #15046 #15040
+
+8.8.2 / 2024-11-18
+==================
+ * fix(model): handle array filters when casting bulkWrite #15036 #14978
+ * fix(model): make diffIndexes() avoid trying to drop default timeseries collection index #15035 #14984
+ * fix: save execution stack in query as string #15039 [durran](https://github.com/durran)
+ * types(cursor): correct asyncIterator and asyncDispose for TypeScript with lib: 'esnext' #15038
+ * docs(migrating_to_8): add note about removing findByIdAndRemove #15024 [dragontaek-lee](https://github.com/dragontaek-lee)
+
+8.8.1 / 2024-11-08
+==================
+ * perf: make a few micro-optimizations to help speed up findOne() #15022 #14906
+ * fix: apply embedded discriminators to subdoc schemas before compiling top level model so middleware applies correctly #15001 #14961
+ * fix(query): add overwriteImmutable option to allow updating immutable properties without disabling strict mode #15000 #8619
+
+8.8.0 / 2024-10-31
+==================
+ * feat: upgrade mongodb -> ~6.10 #14991 #14877
+ * feat(query): add schemaLevelProjections option to query to disable schema-level select: false #14986 #11474
+ * feat: allow defining virtuals on arrays, not just array elements #14955 #2326
+ * feat(model): add applyTimestamps() function to apply all schema timestamps, including subdocuments, to a given POJO #14943 #14698
+ * feat(model): add hideIndexes option to syncIndexes() and cleanIndexes() #14987 #14868
+ * fix(query): make sanitizeFilter disable implicit $in #14985 #14657
+ * fix(model): avoid unhandled error if createIndex() throws a sync error #14995
+ * fix(model): avoid throwing TypeError if bulkSave()'s bulkWrite() fails with a non-BulkWriteError #14993
+ * types: added toJSON:flattenObjectIds effect #14989
+ * types: add `__v` to lean() result type and ModifyResult #14990 #12959
+ * types: use globalThis instead of global for NativeDate #14992 #14988
+ * docs(change-streams): fix markdown syntax highlighting for script output example #14994
+
+8.7.3 / 2024-10-25
+==================
+ * fix(cursor): close underlying query cursor when calling destroy() #14982 #14966
+ * types: add JSONSerialized helper that can convert HydratedDocument to JSON output type #14981 #14451
+ * types(model): convert InsertManyResult to interface and remove unnecessary insertedIds override #14977
+ * types(connection): add missing sanitizeFilter option #14975
+ * types: improve goto definition for inferred schema definitions #14968 [forivall](https://github.com/forivall)
+ * docs(migration-guide-v7): correct link to the section "Id Setter" #14973 [rb-ntnx](https://github.com/rb-ntnx)
+
+8.7.2 / 2024-10-17
+==================
+ * fix(document): recursively clear modified subpaths when setting deeply nested subdoc to null #14963 #14952
+ * fix(populate): handle array of ids with parent refPath #14965
+ * types: make Buffers into mongodb.Binary in lean result type to match runtime behavior #14967
+ * types: correct schema type inference when using nested typeKey like type: { type: String } #14956 #14950
+ * types: re-export DeleteResult and UpdateResult from MongoDB Node.js driver #14947 #14946
+ * docs(documents): add section on setting deeply nested properties, including warning about nullish coalescing assignment #14972
+ * docs(model): add more info on acknowledged: false, specifically that Mongoose may return that if the update was empty #14957
+
+8.7.1 / 2024-10-09
+==================
+ * fix: set flattenObjectIds to false when calling toObject() for internal purposes #14938
+ * fix: add mongodb 8 to test matrix #14937
+ * fix: handle buffers stored in MongoDB as EJSON representation with { $binary } #14932
+ * docs: indicate that Mongoose 8.7 is required for full MongoDB 8 support #14937
+
+8.7.0 / 2024-09-27
+==================
+ * feat(model): add Model.applyVirtuals() to apply virtuals to a POJO #14905 #14818
+ * feat: upgrade mongodb -> 6.9.0 #14914
+ * feat(query): cast $rename to string #14887 #3027
+ * feat(SchemaType): add getEmbeddedSchemaType() method to SchemaTypes #14880 #8389
+ * fix(model): throw MongooseBulkSaveIncompleteError if bulkSave() didn't completely succeed #14884 #14763
+ * fix(connection): avoid returning readyState = connected if connection state is stale #14812 #14727
+ * fix: depopulate if push() or addToSet() with an ObjectId on a populated array #14883 #1635
+ * types: make __v a number, only set __v on top-level documents #14892
+
+8.6.4 / 2024-09-26
+==================
+ * fix(document): avoid massive perf degradation when saving new doc with 10 level deep subdocs #14910 #14897
+ * fix(model): skip applying static hooks by default if static name conflicts with aggregate middleware #14904 [dragontaek-lee](https://github.com/dragontaek-lee)
+ * fix(model): filter applying static hooks by default if static name conflicts with mongoose middleware #14908 [dragontaek-lee](https://github.com/dragontaek-lee)
+
+7.8.2 / 2024-09-25
+==================
+ * fix(projection): avoid setting projection to unknown exclusive/inclusive if elemMatch on a Date, ObjectId, etc. #14894 #14893
+
+8.6.3 / 2024-09-17
+==================
+ * fix: make getters convert uuid to string when calling toObject() and toJSON() #14890 #14869
+ * fix: fix missing Aggregate re-exports for ESM #14886 [wongsean](https://github.com/wongsean)
+ * types(document): add generic param to depopulate() to allow updating properties #14891 #14876
+
+6.13.2 / 2024-09-12
+===================
+ * fix(document): make set() respect merge option on deeply nested objects #14870 #14878
+
+8.6.2 / 2024-09-11
+==================
+ * fix: make set merge deeply nested objects #14870 #14861 [ianHeydoc](https://github.com/ianHeydoc)
+ * types: allow arbitrary keys in query filters again (revert #14764) #14874 #14863 #14862 #14842
+ * types: make SchemaType static setters property accessible in TypeScript #14881 #14879
+ * type(inferrawdoctype): infer Date types as JS dates rather than Mongoose SchemaType Date #14882 #14839
+
+8.6.1 / 2024-09-03
+==================
+ * fix(document): avoid unnecessary clone() in applyGetters() that was preventing getters from running on 3-level deep subdocuments #14844 #14840 #14835
+ * fix(model): throw error if bulkSave() did not insert or update any documents #14837 #14763
+ * fix(cursor): throw error in ChangeStream constructor if changeStreamThunk() throws a sync error #14846
+ * types(query): add $expr to RootQuerySelector #14845
+ * docs: update populate.md to fix missing match: { } #14847 [makhoulshbeeb](https://github.com/makhoulshbeeb)
+
8.6.0 / 2024-08-28
==================
* feat: upgrade mongodb -> 6.8.0, handle throwing error on closed cursor in Mongoose with `MongooseError` instead of `MongoCursorExhaustedError` #14813
@@ -93,6 +201,7 @@
==================
* feat(model): add throwOnValidationError option for opting into getting MongooseBulkWriteError if all valid operations succeed in bulkWrite() and insertMany() #14599 #14587 #14572 #13410
8.4.3 / 2024-06-17
==================
* fix: remove 0x flamegraph files from release
@@ -105,6 +214,11 @@
* fix(connection): fix up some inconsistencies in operation-end event and add to docs #14659 #14648
* types: avoid inferring Boolean, Buffer, ObjectId as Date in schema definitions under certain circumstances #14667 #14630
 * docs: add note about parallelism in transactions #14647 [fiws](https://github.com/fiws)
+
+6.13.1 / 2024-09-06
+===================
+ * fix: remove empty $and, $or, $not that were made empty by strict mode #14749 #13086 [0x0a0d](https://github.com/0x0a0d)
+
6.13.0 / 2024-06-06
===================
@@ -608,7 +722,7 @@
==================
* perf: speed up mapOfSubdocs benchmark by 4x by avoiding unnecessary O(n^2) loop in getPathsToValidate() #13614
* feat: upgrade to MongoDB Node.js driver 5.7.0 #13591
- * feat: add `id` setter which allows modifying `_id` by setting `id` (Note this change was reverted in Mongoose 8) #13517
+ * BREAKING CHANGE: add `id` setter which allows modifying `_id` by setting `id` (Note this change was originally shipped as a `feat`, but later reverted in Mongoose 8 due to compatibility issues) #13517
* feat: support generating custom cast error message with a function #13608 #3162
* feat(query): support MongoDB driver's includeResultMetadata option for findOneAndUpdate #13584 #13539
* feat(connection): add Connection.prototype.removeDb() for removing a related connection #13580 #11821
diff --git a/benchmarks/createDeepNestedDocArray.js b/benchmarks/createDeepNestedDocArray.js
new file mode 100644
index 00000000000..0f3ac6d4a7b
--- /dev/null
+++ b/benchmarks/createDeepNestedDocArray.js
@@ -0,0 +1,37 @@
+'use strict';
+
+const mongoose = require('../');
+
+run().catch(err => {
+ console.error(err);
+ process.exit(-1);
+});
+
+async function run() {
+ await mongoose.connect('mongodb://127.0.0.1:27017/mongoose_benchmark');
+
+ const levels = 12;
+
+ let schema = new mongoose.Schema({ test: { type: String, required: true } });
+ let doc = { test: 'gh-14897' };
+ for (let i = 0; i < levels; ++i) {
+ schema = new mongoose.Schema({ level: Number, subdocs: [schema] });
+ doc = { level: (levels - i), subdocs: [{ ...doc }, { ...doc }] };
+ }
+ const Test = mongoose.model('Test', schema);
+
+ if (!process.env.MONGOOSE_BENCHMARK_SKIP_SETUP) {
+ await Test.deleteMany({});
+ }
+
+ const insertStart = Date.now();
+ await Test.create(doc);
+ const insertEnd = Date.now();
+
+ const results = {
+ 'create() time ms': +(insertEnd - insertStart).toFixed(2)
+ };
+
+ console.log(JSON.stringify(results, null, ' '));
+ process.exit(0);
+}
\ No newline at end of file
diff --git a/benchmarks/saveSimple.js b/benchmarks/saveSimple.js
new file mode 100644
index 00000000000..0029559cdb9
--- /dev/null
+++ b/benchmarks/saveSimple.js
@@ -0,0 +1,57 @@
+'use strict';
+
+const mongoose = require('../');
+
+run().catch(err => {
+ console.error(err);
+ process.exit(-1);
+});
+
+async function run() {
+ await mongoose.connect('mongodb://127.0.0.1:27017/mongoose_benchmark');
+ const FooSchema = new mongoose.Schema({
+ prop1: String,
+ prop2: String,
+ prop3: String,
+ prop4: String,
+ prop5: String,
+ prop6: String,
+ prop7: String,
+ prop8: String,
+ prop9: String,
+ prop10: String
+ });
+ const FooModel = mongoose.model('Foo', FooSchema);
+
+ if (!process.env.MONGOOSE_BENCHMARK_SKIP_SETUP) {
+ await FooModel.deleteMany({});
+ }
+
+ const numIterations = 500;
+ const saveStart = Date.now();
+ for (let i = 0; i < numIterations; ++i) {
+ for (let j = 0; j < 10; ++j) {
+ const doc = new FooModel({
+ prop1: `test ${i}`,
+ prop2: `test ${i}`,
+ prop3: `test ${i}`,
+ prop4: `test ${i}`,
+ prop5: `test ${i}`,
+ prop6: `test ${i}`,
+ prop7: `test ${i}`,
+ prop8: `test ${i}`,
+ prop9: `test ${i}`,
+ prop10: `test ${i}`
+ });
+ await doc.save();
+ }
+ }
+ const saveEnd = Date.now();
+
+ const results = {
+ 'Average save time ms': +((saveEnd - saveStart) / numIterations).toFixed(2)
+ };
+
+ console.log(JSON.stringify(results, null, ' '));
+ process.exit(0);
+}
diff --git a/docs/change-streams.md b/docs/change-streams.md
index cc6d3d36c94..885279050e2 100644
--- a/docs/change-streams.md
+++ b/docs/change-streams.md
@@ -21,7 +21,7 @@ await Person.create({ name: 'Axl Rose' });
The above script will print output that looks like:
-```no-highlight
+```javascript
{
_id: {
_data: '8262408DAC000000012B022C0100296E5A10042890851837DB4792BE6B235E8B85489F46645F6964006462408DAC6F5C42FF5EE087A20004'
diff --git a/docs/compatibility.md b/docs/compatibility.md
index f183a4f67b1..a343a470131 100644
--- a/docs/compatibility.md
+++ b/docs/compatibility.md
@@ -18,6 +18,7 @@ Below are the [semver](http://semver.org/) ranges representing which versions of
| MongoDB Server | Mongoose |
| :------------: | :-------------------------------------: |
+| `8.x` | `^8.7.0` |
| `7.x` | `^7.4.0 \| ^8.0.0` |
| `6.x` | `^6.5.0 \| ^7.0.0 \| ^8.0.0` |
| `5.x` | `^5.13.0` \| `^6.0.0 \| ^7.0.0 \| ^8.0.0`|
diff --git a/docs/documents.md b/docs/documents.md
index 20764c6dbef..6aca0814253 100644
--- a/docs/documents.md
+++ b/docs/documents.md
@@ -8,6 +8,7 @@ to documents as stored in MongoDB. Each document is an instance of its
Documents vs Models
Retrieving
Updating Using save()
+ Setting Nested Properties
Updating Using Queries
Validating
Overwriting
@@ -81,6 +82,54 @@ doc.name = 'foo';
await doc.save(); // Throws DocumentNotFoundError
```
+## Setting Nested Properties
+
+Mongoose documents have a `set()` function that you can use to safely set deeply nested properties.
+
+```javascript
+const schema = new Schema({
+ nested: {
+ subdoc: new Schema({
+ name: String
+ })
+ }
+});
+const TestModel = mongoose.model('Test', schema);
+
+const doc = new TestModel();
+doc.set('nested.subdoc.name', 'John Smith');
+doc.nested.subdoc.name; // 'John Smith'
+```
+
+Mongoose documents also have a `get()` function that lets you safely read deeply nested properties. `get()` lets you avoid having to explicitly check for nullish values, similar to JavaScript's [optional chaining operator `?.`](https://masteringjs.io/tutorials/fundamentals/optional-chaining-array).
+
+```javascript
+const doc2 = new TestModel();
+
+doc2.get('nested.subdoc.name'); // undefined
+doc2.nested?.subdoc?.name; // undefined
+
+doc2.set('nested.subdoc.name', 'Will Smith');
+doc2.get('nested.subdoc.name'); // 'Will Smith'
+```
+
+You can use optional chaining `?.` and nullish coalescing `??` with Mongoose documents.
+However, be careful when using [nullish coalescing assignments `??=`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Nullish_coalescing_assignment) to create nested paths with Mongoose documents.
+
+```javascript
+// The following works fine
+const doc3 = new TestModel();
+doc3.nested.subdoc ??= {};
+doc3.nested.subdoc.name = 'John Smythe';
+
+// The following does **NOT** work.
+// Do not use the following pattern with Mongoose documents.
+const doc4 = new TestModel();
+(doc4.nested.subdoc ??= {}).name = 'Charlie Smith';
+doc4.nested.subdoc; // Empty object
+doc4.nested.subdoc.name; // undefined
+```
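+
+If you need to both create the intermediate path and assign a value in one step, a safer pattern is to use `set()` as shown at the start of this section. The sketch below assumes the same `TestModel` from the examples above; `doc5` is just a fresh illustrative document.
+
+```javascript
+const doc5 = new TestModel();
+// `set()` creates the intermediate `nested.subdoc` path and assigns the name in one call.
+doc5.set('nested.subdoc.name', 'Charlie Smith');
+doc5.nested.subdoc.name; // 'Charlie Smith'
+```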
+
## Updating Using Queries {#updating-using-queries}
The [`save()`](api/model.html#model_Model-save) function is generally the right
diff --git a/docs/middleware.md b/docs/middleware.md
index b74452affbd..87381fad2e0 100644
--- a/docs/middleware.md
+++ b/docs/middleware.md
@@ -31,7 +31,6 @@ In document middleware functions, `this` refers to the document. To access the m
* [validate](api/document.html#document_Document-validate)
* [save](api/model.html#model_Model-save)
-* [remove](api/model.html#model_Model-remove)
* [updateOne](api/document.html#document_Document-updateOne)
* [deleteOne](api/model.html#model_Model-deleteOne)
* [init](api/document.html#document_Document-init) (note: init hooks are [synchronous](#synchronous))
@@ -50,7 +49,6 @@ In query middleware functions, `this` refers to the query.
* [findOneAndDelete](api/query.html#query_Query-findOneAndDelete)
* [findOneAndReplace](api/query.html#query_Query-findOneAndReplace)
* [findOneAndUpdate](api/query.html#query_Query-findOneAndUpdate)
-* [remove](api/model.html#model_Model-remove)
* [replaceOne](api/query.html#query_Query-replaceOne)
* [updateOne](api/query.html#query_Query-updateOne)
* [updateMany](api/query.html#query_Query-updateMany)
@@ -87,7 +85,6 @@ Here are the possible strings that can be passed to `pre()`
* findOneAndUpdate
* init
* insertMany
-* remove
* replaceOne
* save
* update
@@ -400,11 +397,11 @@ Mongoose has both query and document hooks for `deleteOne()`.
```javascript
schema.pre('deleteOne', function() { console.log('Removing!'); });
-// Does **not** print "Removing!". Document middleware for `remove` is not executed by default
+// Does **not** print "Removing!". Document middleware for `deleteOne` is not executed by default
await doc.deleteOne();
// Prints "Removing!"
-Model.remove();
+await Model.deleteOne();
```
You can pass options to [`Schema.pre()`](api.html#schema_Schema-pre)
@@ -418,8 +415,8 @@ schema.pre('deleteOne', { document: true, query: false }, function() {
console.log('Deleting doc!');
});
-// Only query middleware. This will get called when you do `Model.remove()`
-// but not `doc.remove()`.
+// Only query middleware. This will get called when you do `Model.deleteOne()`
+// but not `doc.deleteOne()`.
schema.pre('deleteOne', { query: true, document: false }, function() {
console.log('Deleting!');
});
diff --git a/docs/migrating_to_7.md b/docs/migrating_to_7.md
index ffab6f46e9a..21c3aa377c6 100644
--- a/docs/migrating_to_7.md
+++ b/docs/migrating_to_7.md
@@ -200,7 +200,7 @@ In Mongoose 7, `ObjectId` is now a [JavaScript class](https://masteringjs.io/tut
const oid = new mongoose.Types.ObjectId('0'.repeat(24));
```
-## `id` Setter
+## `id` Setter {#id-setter}
Starting in Mongoose 7.4, Mongoose's built-in `id` virtual (which stores the document's `_id` as a string) has a setter which allows modifying the document's `_id` property via `id`.
diff --git a/docs/migrating_to_8.md b/docs/migrating_to_8.md
index 5914318003e..e2748dcb9cd 100644
--- a/docs/migrating_to_8.md
+++ b/docs/migrating_to_8.md
@@ -87,6 +87,9 @@ In Mongoose 7, `findOneAndRemove()` was an alias for `findOneAndDelete()` that M
Mongoose 8 no longer supports `findOneAndRemove()`.
Use `findOneAndDelete()` instead.
+Similarly, Mongoose 8 no longer supports `findByIdAndRemove()`, which was an alias for `findByIdAndDelete()`.
+Please use `findByIdAndDelete()` instead.
+
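+As a quick sketch of the rename (the `Character` model and `id` here are just illustrative):
+
+```javascript
+// Mongoose 7
+// const doc = await Character.findByIdAndRemove(id);
+
+// Mongoose 8
+const doc = await Character.findByIdAndDelete(id);
+```
+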
## Removed `count()` {#removed-count}
`Model.count()` and `Query.prototype.count()` were removed in Mongoose 8. Use `Model.countDocuments()` and `Query.prototype.countDocuments()` instead.
diff --git a/docs/populate.md b/docs/populate.md
index 0a24878d68d..c17710a397a 100644
--- a/docs/populate.md
+++ b/docs/populate.md
@@ -119,6 +119,33 @@ story.author = author;
console.log(story.author.name); // prints "Ian Fleming"
```
+You can also push documents or POJOs onto a populated array, and Mongoose will add those documents if their `ref` matches.
+
+```javascript
+const fan1 = await Person.create({ name: 'Sean' });
+await Story.updateOne({ title: 'Casino Royale' }, { $push: { fans: { $each: [fan1._id] } } });
+
+const story = await Story.findOne({ title: 'Casino Royale' }).populate('fans');
+story.fans[0].name; // 'Sean'
+
+const fan2 = await Person.create({ name: 'George' });
+story.fans.push(fan2);
+story.fans[1].name; // 'George'
+
+story.fans.push({ name: 'Roger' });
+story.fans[2].name; // 'Roger'
+```
+
+If you push a non-POJO and non-document value, like an ObjectId, Mongoose `>= 8.7.0` will depopulate the entire array.
+
+```javascript
+const fan4 = await Person.create({ name: 'Timothy' });
+story.fans.push(fan4._id); // Push the `_id`, not the full document
+
+story.fans[0].name; // undefined, `fans[0]` is now an ObjectId
+story.fans[0].toString() === fan1._id.toString(); // true
+```
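+
+If an array gets depopulated this way and you still need the hydrated documents, one option is to re-populate the path. A minimal sketch, continuing the `story` example above:
+
+```javascript
+// Re-fetch the referenced `Person` documents for the now-depopulated `fans` array
+await story.populate('fans');
+story.fans[0].name; // 'Sean'
+```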
+
## Checking Whether a Field is Populated {#checking-populated}
You can call the `populated()` function to check whether a field is populated.
@@ -246,7 +273,7 @@ the story's `author` will be `null`.
```javascript
const story = await Story.
findOne({ title: 'Casino Royale' }).
- populate({ path: 'author', name: { $ne: 'Ian Fleming' } }).
+ populate({ path: 'author', match: { name: { $ne: 'Ian Fleming' } } }).
exec();
story.author; // `null`
```
diff --git a/docs/subdocs.md b/docs/subdocs.md
index 5bd37433435..5fbfae5109a 100644
--- a/docs/subdocs.md
+++ b/docs/subdocs.md
@@ -38,7 +38,7 @@ doc.child;
```
- - What is a Subdocument?
+ - What is a Subdocument?
- Subdocuments versus Nested Paths
- Subdocument Defaults
- Finding a Subdocument
diff --git a/docs/timestamps.md b/docs/timestamps.md
index 17706a6e0f9..8a722b4b356 100644
--- a/docs/timestamps.md
+++ b/docs/timestamps.md
@@ -236,3 +236,20 @@ await User.findOneAndUpdate({}, { $setOnInsert: { updatedAt: new Date() } }, {
timestamps: { createdAt: true, updatedAt: false }
});
```
+
+## Updating Timestamps
+
+If you need to disable Mongoose's timestamps and update a document's timestamps to a different value using `updateOne()` or `findOneAndUpdate()`, you need to do the following:
+
+1. Set the `timestamps` option to `false` to prevent Mongoose from setting `updatedAt`.
+2. Set `overwriteImmutable` to `true` to allow overwriting `createdAt`, which is an immutable property by default.
+
+```javascript
+const createdAt = new Date('2011-06-01');
+// Update a document's `createdAt` to a custom value.
+// Normally Mongoose would prevent doing this because `createdAt` is immutable.
+await Model.updateOne({ _id: doc._id }, { createdAt }, { overwriteImmutable: true, timestamps: false });
+
+doc = await Model.collection.findOne({ _id: doc._id });
+doc.createdAt.valueOf() === createdAt.valueOf(); // true
+```
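+
+The same two options apply when using `findOneAndUpdate()`. A sketch, reusing `Model`, `doc`, and `createdAt` from the example above:
+
+```javascript
+const res = await Model.findOneAndUpdate(
+  { _id: doc._id },
+  { createdAt },
+  { new: true, overwriteImmutable: true, timestamps: false }
+);
+res.createdAt.valueOf() === createdAt.valueOf(); // true
+```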
diff --git a/index.js b/index.js
index 4f4fa8ebe03..6ebbd5fd5d3 100644
--- a/index.js
+++ b/index.js
@@ -28,6 +28,7 @@ module.exports.modelNames = mongoose.modelNames;
module.exports.plugin = mongoose.plugin;
module.exports.connections = mongoose.connections;
module.exports.version = mongoose.version;
+module.exports.Aggregate = mongoose.Aggregate;
module.exports.Mongoose = mongoose.Mongoose;
module.exports.Schema = mongoose.Schema;
module.exports.SchemaType = mongoose.SchemaType;
diff --git a/lib/cast.js b/lib/cast.js
index 25f60ce7762..03cbb3415c2 100644
--- a/lib/cast.js
+++ b/lib/cast.js
@@ -28,6 +28,7 @@ const ALLOWED_GEOWITHIN_GEOJSON_TYPES = ['Polygon', 'MultiPolygon'];
* @param {Object} [options] the query options
 * @param {Boolean|"throw"} [options.strict] Whether to enable all strict options
* @param {Boolean|"throw"} [options.strictQuery] Enable strict Queries
+ * @param {Boolean} [options.sanitizeFilter] avoid adding implicit query selectors ($in)
* @param {Boolean} [options.upsert]
* @param {Query} [context] passed to setters
* @api private
@@ -65,10 +66,11 @@ module.exports = function cast(schema, obj, options, context) {
if (!Array.isArray(val)) {
throw new CastError('Array', val, path);
}
- for (let k = 0; k < val.length; ++k) {
+ for (let k = val.length - 1; k >= 0; k--) {
if (val[k] == null || typeof val[k] !== 'object') {
throw new CastError('Object', val[k], path + '.' + k);
}
+ const beforeCastKeysLength = Object.keys(val[k]).length;
const discriminatorValue = val[k][schema.options.discriminatorKey];
if (discriminatorValue == null) {
val[k] = cast(schema, val[k], options, context);
@@ -76,6 +78,15 @@ module.exports = function cast(schema, obj, options, context) {
const discriminatorSchema = getSchemaDiscriminatorByValue(context.schema, discriminatorValue);
val[k] = cast(discriminatorSchema ? discriminatorSchema : schema, val[k], options, context);
}
+
+ if (Object.keys(val[k]).length === 0 && beforeCastKeysLength !== 0) {
+ val.splice(k, 1);
+ }
+ }
+
+ // delete empty: {$or: []} -> {}
+ if (val.length === 0) {
+ delete obj[path];
}
} else if (path === '$where') {
type = typeof val;
@@ -362,7 +373,7 @@ module.exports = function cast(schema, obj, options, context) {
}
}
- } else if (Array.isArray(val) && ['Buffer', 'Array'].indexOf(schematype.instance) === -1) {
+ } else if (Array.isArray(val) && ['Buffer', 'Array'].indexOf(schematype.instance) === -1 && !options.sanitizeFilter) {
const casted = [];
const valuesArray = val;
diff --git a/lib/connection.js b/lib/connection.js
index 6e52d6ca4a0..0b3ec6ae0ea 100644
--- a/lib/connection.js
+++ b/lib/connection.js
@@ -71,6 +71,9 @@ function Connection(base) {
} else {
this.id = base.nextConnectionId;
}
+
+ // Internal queue of objects `{ fn, ctx, args }` that Mongoose calls when this connection is successfully
+ // opened. In `onOpen()`, Mongoose calls every entry in `_queue` and empties the queue.
this._queue = [];
}
@@ -103,6 +106,15 @@ Object.setPrototypeOf(Connection.prototype, EventEmitter.prototype);
Object.defineProperty(Connection.prototype, 'readyState', {
get: function() {
+ // If connection thinks it is connected, but we haven't received a heartbeat in 2 heartbeat intervals,
+ // that likely means the connection is stale (potentially due to frozen AWS Lambda container)
+ if (
+ this._readyState === STATES.connected &&
+ this._lastHeartbeatAt != null &&
+ typeof this.client?.topology?.s?.description?.heartbeatFrequencyMS === 'number' &&
+ Date.now() - this._lastHeartbeatAt >= this.client.topology.s.description.heartbeatFrequencyMS * 2) {
+ return STATES.disconnected;
+ }
return this._readyState;
},
set: function(val) {
diff --git a/lib/constants.js b/lib/constants.js
index 83a66832b55..3a03bd502fc 100644
--- a/lib/constants.js
+++ b/lib/constants.js
@@ -34,3 +34,40 @@ const queryMiddlewareFunctions = queryOperations.concat([
]);
exports.queryMiddlewareFunctions = queryMiddlewareFunctions;
+
+/*!
+ * ignore
+ */
+
+const aggregateMiddlewareFunctions = [
+ 'aggregate'
+];
+
+exports.aggregateMiddlewareFunctions = aggregateMiddlewareFunctions;
+
+/*!
+ * ignore
+ */
+
+const modelMiddlewareFunctions = [
+ 'bulkWrite',
+ 'createCollection',
+ 'insertMany'
+];
+
+exports.modelMiddlewareFunctions = modelMiddlewareFunctions;
+
+/*!
+ * ignore
+ */
+
+const documentMiddlewareFunctions = [
+ 'validate',
+ 'save',
+ 'remove',
+ 'updateOne',
+ 'deleteOne',
+ 'init'
+];
+
+exports.documentMiddlewareFunctions = documentMiddlewareFunctions;
diff --git a/lib/cursor/aggregationCursor.js b/lib/cursor/aggregationCursor.js
index 5462a6e60e6..fd795526ca1 100644
--- a/lib/cursor/aggregationCursor.js
+++ b/lib/cursor/aggregationCursor.js
@@ -196,6 +196,37 @@ AggregationCursor.prototype.close = async function close() {
this.emit('close');
};
+/**
+ * Marks this cursor as destroyed. Will stop streaming and subsequent calls to
+ * `next()` will error.
+ *
+ * @return {this}
+ * @api private
+ * @method _destroy
+ */
+
+AggregationCursor.prototype._destroy = function _destroy(_err, callback) {
+ let waitForCursor = null;
+ if (!this.cursor) {
+ waitForCursor = new Promise((resolve) => {
+ this.once('cursor', resolve);
+ });
+ } else {
+ waitForCursor = Promise.resolve();
+ }
+
+ waitForCursor
+ .then(() => this.cursor.close())
+ .then(() => {
+ this._closed = true;
+ callback();
+ })
+ .catch(error => {
+ callback(error);
+ });
+ return this;
+};
+
/**
* Get the next document from this cursor. Will return `null` when there are
* no documents left.
diff --git a/lib/cursor/changeStream.js b/lib/cursor/changeStream.js
index 55cdecfcdc2..b41e2379e83 100644
--- a/lib/cursor/changeStream.js
+++ b/lib/cursor/changeStream.js
@@ -5,6 +5,7 @@
*/
const EventEmitter = require('events').EventEmitter;
+const MongooseError = require('../error/mongooseError');
/*!
* ignore
@@ -25,6 +26,7 @@ class ChangeStream extends EventEmitter {
this.bindedEvents = false;
this.pipeline = pipeline;
this.options = options;
+ this.errored = false;
if (options && options.hydrate && !options.model) {
throw new Error(
@@ -33,19 +35,36 @@ class ChangeStream extends EventEmitter {
);
}
+ let syncError = null;
this.$driverChangeStreamPromise = new Promise((resolve, reject) => {
// This wrapper is necessary because of buffering.
- changeStreamThunk((err, driverChangeStream) => {
- if (err != null) {
- this.emit('error', err);
- return reject(err);
- }
+ try {
+ changeStreamThunk((err, driverChangeStream) => {
+ if (err != null) {
+ this.errored = true;
+ this.emit('error', err);
+ return reject(err);
+ }
- this.driverChangeStream = driverChangeStream;
- this.emit('ready');
- resolve();
- });
+ this.driverChangeStream = driverChangeStream;
+ this.emit('ready');
+ resolve();
+ });
+ } catch (err) {
+ syncError = err;
+ this.errored = true;
+ this.emit('error', err);
+ reject(err);
+ }
});
+
+ // Because a ChangeStream is an event emitter, there's no way to register an 'error' handler
+ // that catches errors which occur in the constructor, unless we force sync errors into async
+ // errors with setImmediate(). For a cleaner stack trace, we just immediately throw any synchronous
+ // errors that occurred with changeStreamThunk().
+ if (syncError != null) {
+ throw syncError;
+ }
}
_bindEvents() {
@@ -92,10 +111,16 @@ class ChangeStream extends EventEmitter {
}
hasNext(cb) {
+ if (this.errored) {
+ throw new MongooseError('Cannot call hasNext() on errored ChangeStream');
+ }
return this.driverChangeStream.hasNext(cb);
}
next(cb) {
+ if (this.errored) {
+ throw new MongooseError('Cannot call next() on errored ChangeStream');
+ }
if (this.options && this.options.hydrate) {
if (cb != null) {
const originalCb = cb;
@@ -126,16 +151,25 @@ class ChangeStream extends EventEmitter {
}
addListener(event, handler) {
+ if (this.errored) {
+ throw new MongooseError('Cannot call addListener() on errored ChangeStream');
+ }
this._bindEvents();
return super.addListener(event, handler);
}
on(event, handler) {
+ if (this.errored) {
+ throw new MongooseError('Cannot call on() on errored ChangeStream');
+ }
this._bindEvents();
return super.on(event, handler);
}
once(event, handler) {
+ if (this.errored) {
+ throw new MongooseError('Cannot call once() on errored ChangeStream');
+ }
this._bindEvents();
return super.once(event, handler);
}
diff --git a/lib/cursor/queryCursor.js b/lib/cursor/queryCursor.js
index 6f00a316794..5d05868f59e 100644
--- a/lib/cursor/queryCursor.js
+++ b/lib/cursor/queryCursor.js
@@ -10,7 +10,7 @@ const eachAsync = require('../helpers/cursor/eachAsync');
const helpers = require('../queryHelpers');
const kareem = require('kareem');
const immediate = require('../helpers/immediate');
-const { once } = require('node:events');
+const { once } = require('events');
const util = require('util');
/**
@@ -238,6 +238,39 @@ QueryCursor.prototype.close = async function close() {
}
};
+/**
+ * Marks this cursor as destroyed. Will stop streaming and subsequent calls to
+ * `next()` will error.
+ *
+ * @return {this}
+ * @api private
+ * @method _destroy
+ */
+
+QueryCursor.prototype._destroy = function _destroy(_err, callback) {
+ let waitForCursor = null;
+ if (!this.cursor) {
+ waitForCursor = new Promise((resolve) => {
+ this.once('cursor', resolve);
+ });
+ } else {
+ waitForCursor = Promise.resolve();
+ }
+
+ waitForCursor
+ .then(() => {
+ this.cursor.close();
+ })
+ .then(() => {
+ this._closed = true;
+ callback();
+ })
+ .catch(error => {
+ callback(error);
+ });
+ return this;
+};
+
/**
* Rewind this cursor to its uninitialized state. Any options that are present on the cursor will
* remain in effect. Iterating this cursor will cause new queries to be sent to the server, even
diff --git a/lib/document.js b/lib/document.js
index f6a6016c72e..06204519db9 100644
--- a/lib/document.js
+++ b/lib/document.js
@@ -1213,7 +1213,7 @@ Document.prototype.$set = function $set(path, val, type, options) {
this.$__setValue(path, null);
cleanModifiedSubpaths(this, path);
} else {
- return this.$set(val, path, constructing);
+ return this.$set(val, path, constructing, options);
}
const keys = getKeysInSchemaOrder(this.$__schema, val, path);
@@ -2689,7 +2689,7 @@ function _evaluateRequiredFunctions(doc) {
* ignore
*/
-function _getPathsToValidate(doc, pathsToValidate, pathsToSkip) {
+function _getPathsToValidate(doc, pathsToValidate, pathsToSkip, isNestedValidate) {
const doValidateOptions = {};
_evaluateRequiredFunctions(doc);
@@ -2709,35 +2709,40 @@ function _getPathsToValidate(doc, pathsToValidate, pathsToSkip) {
Object.keys(doc.$__.activePaths.getStatePaths('default')).forEach(addToPaths);
function addToPaths(p) { paths.add(p); }
- const subdocs = doc.$getAllSubdocs();
- const modifiedPaths = doc.modifiedPaths();
- for (const subdoc of subdocs) {
- if (subdoc.$basePath) {
- const fullPathToSubdoc = subdoc.$isSingleNested ? subdoc.$__pathRelativeToParent() : subdoc.$__fullPathWithIndexes();
-
- // Remove child paths for now, because we'll be validating the whole
- // subdoc.
- // The following is a faster take on looping through every path in `paths`
- // and checking if the path starts with `fullPathToSubdoc` re: gh-13191
- for (const modifiedPath of subdoc.modifiedPaths()) {
- paths.delete(fullPathToSubdoc + '.' + modifiedPath);
- }
+ if (!isNestedValidate) {
+ // If we're validating a subdocument, all this logic will run anyway on the top-level document, so skip for subdocuments
+ const subdocs = doc.$getAllSubdocs({ useCache: true });
+ const modifiedPaths = doc.modifiedPaths();
+ for (const subdoc of subdocs) {
+ if (subdoc.$basePath) {
+ const fullPathToSubdoc = subdoc.$isSingleNested ? subdoc.$__pathRelativeToParent() : subdoc.$__fullPathWithIndexes();
+
+ // Remove child paths for now, because we'll be validating the whole
+ // subdoc.
+ // The following is a faster take on looping through every path in `paths`
+ // and checking if the path starts with `fullPathToSubdoc` re: gh-13191
+ for (const modifiedPath of subdoc.modifiedPaths()) {
+ paths.delete(fullPathToSubdoc + '.' + modifiedPath);
+ }
- if (doc.$isModified(fullPathToSubdoc, null, modifiedPaths) &&
- !doc.isDirectModified(fullPathToSubdoc) &&
- !doc.$isDefault(fullPathToSubdoc)) {
- paths.add(fullPathToSubdoc);
+ if (doc.$isModified(fullPathToSubdoc, null, modifiedPaths) &&
+ // Avoid using isDirectModified() here because that does additional checks on whether the parent path
+ // is direct modified, which can cause performance issues re: gh-14897
+ !doc.$__.activePaths.getStatePaths('modify').hasOwnProperty(fullPathToSubdoc) &&
+ !doc.$isDefault(fullPathToSubdoc)) {
+ paths.add(fullPathToSubdoc);
- if (doc.$__.pathsToScopes == null) {
- doc.$__.pathsToScopes = {};
- }
- doc.$__.pathsToScopes[fullPathToSubdoc] = subdoc.$isDocumentArrayElement ?
- subdoc.__parentArray :
- subdoc.$parent();
+ if (doc.$__.pathsToScopes == null) {
+ doc.$__.pathsToScopes = {};
+ }
+ doc.$__.pathsToScopes[fullPathToSubdoc] = subdoc.$isDocumentArrayElement ?
+ subdoc.__parentArray :
+ subdoc.$parent();
- doValidateOptions[fullPathToSubdoc] = { skipSchemaValidators: true };
- if (subdoc.$isDocumentArrayElement && subdoc.__index != null) {
- doValidateOptions[fullPathToSubdoc].index = subdoc.__index;
+ doValidateOptions[fullPathToSubdoc] = { skipSchemaValidators: true };
+ if (subdoc.$isDocumentArrayElement && subdoc.__index != null) {
+ doValidateOptions[fullPathToSubdoc].index = subdoc.__index;
+ }
}
}
}
@@ -2972,7 +2977,7 @@ Document.prototype.$__validate = function(pathsToValidate, options, callback) {
paths = [...paths];
doValidateOptionsByPath = {};
} else {
- const pathDetails = _getPathsToValidate(this, pathsToValidate, pathsToSkip);
+ const pathDetails = _getPathsToValidate(this, pathsToValidate, pathsToSkip, options && options._nestedValidate);
paths = shouldValidateModifiedOnly ?
pathDetails[0].filter((path) => this.$isModified(path)) :
pathDetails[0];
@@ -3059,7 +3064,8 @@ Document.prototype.$__validate = function(pathsToValidate, options, callback) {
const doValidateOptions = {
...doValidateOptionsByPath[path],
path: path,
- validateAllPaths
+ validateAllPaths,
+ _nestedValidate: true
};
schemaType.doValidate(val, function(err) {
@@ -3476,48 +3482,13 @@ Document.prototype.$__reset = function reset() {
let _this = this;
// Skip for subdocuments
- const subdocs = !this.$isSubdocument ? this.$getAllSubdocs() : null;
+ const subdocs = !this.$isSubdocument ? this.$getAllSubdocs({ useCache: true }) : null;
if (subdocs && subdocs.length > 0) {
- const resetArrays = new Set();
for (const subdoc of subdocs) {
- const fullPathWithIndexes = subdoc.$__fullPathWithIndexes();
subdoc.$__reset();
- if (this.isModified(fullPathWithIndexes) || isParentInit(fullPathWithIndexes)) {
- if (subdoc.$isDocumentArrayElement) {
- resetArrays.add(subdoc.parentArray());
- } else {
- const parent = subdoc.$parent();
- if (parent === this) {
- this.$__.activePaths.clearPath(subdoc.$basePath);
- } else if (parent != null && parent.$isSubdocument) {
- // If map path underneath subdocument, may end up with a case where
- // map path is modified but parent still needs to be reset. See gh-10295
- parent.$__reset();
- }
- }
- }
- }
-
- for (const array of resetArrays) {
- this.$__.activePaths.clearPath(array.$path());
- array[arrayAtomicsBackupSymbol] = array[arrayAtomicsSymbol];
- array[arrayAtomicsSymbol] = {};
}
}
- function isParentInit(path) {
- path = path.indexOf('.') === -1 ? [path] : path.split('.');
- let cur = '';
- for (let i = 0; i < path.length; ++i) {
- cur += (cur.length ? '.' : '') + path[i];
- if (_this.$__.activePaths[cur] === 'init') {
- return true;
- }
- }
-
- return false;
- }
-
// clear atomics
this.$__dirty().forEach(function(dirt) {
const type = dirt.value;
@@ -3701,6 +3672,7 @@ Document.prototype.$__getArrayPathsToValidate = function() {
/**
* Get all subdocs (by bfs)
*
+ * @param {Object} [options] options. Currently for internal use.
* @return {Array}
* @api public
* @method $getAllSubdocs
@@ -3708,57 +3680,50 @@ Document.prototype.$__getArrayPathsToValidate = function() {
* @instance
*/
-Document.prototype.$getAllSubdocs = function() {
+Document.prototype.$getAllSubdocs = function(options) {
+ if (options?.useCache && this.$__.saveOptions?.__subdocs) {
+ return this.$__.saveOptions.__subdocs;
+ }
+
DocumentArray || (DocumentArray = require('./types/documentArray'));
Embedded = Embedded || require('./types/arraySubdocument');
- function docReducer(doc, seed, path) {
- let val = doc;
- let isNested = false;
- if (path) {
- if (doc instanceof Document && doc[documentSchemaSymbol].paths[path]) {
- val = doc._doc[path];
- } else if (doc instanceof Document && doc[documentSchemaSymbol].nested[path]) {
- val = doc._doc[path];
- isNested = true;
- } else {
- val = doc[path];
+ const subDocs = [];
+ function getSubdocs(doc) {
+ const newSubdocs = [];
+ for (const { path } of doc.$__schema.childSchemas) {
+ const val = doc.$__getValue(path);
+ if (val == null) {
+ continue;
}
- }
- if (val instanceof Embedded) {
- seed.push(val);
- } else if (val instanceof Map) {
- seed = Array.from(val.keys()).reduce(function(seed, path) {
- return docReducer(val.get(path), seed, null);
- }, seed);
- } else if (val && !Array.isArray(val) && val.$isSingleNested) {
- seed = Object.keys(val._doc).reduce(function(seed, path) {
- return docReducer(val, seed, path);
- }, seed);
- seed.push(val);
- } else if (val && utils.isMongooseDocumentArray(val)) {
- val.forEach(function _docReduce(doc) {
- if (!doc || !doc._doc) {
- return;
+ if (val.$__) {
+ newSubdocs.push(val);
+ }
+ if (Array.isArray(val)) {
+ for (const el of val) {
+ if (el != null && el.$__) {
+ newSubdocs.push(el);
+ }
}
- seed = Object.keys(doc._doc).reduce(function(seed, path) {
- return docReducer(doc._doc, seed, path);
- }, seed);
- if (doc instanceof Embedded) {
- seed.push(doc);
+ }
+ if (val instanceof Map) {
+ for (const el of val.values()) {
+ if (el != null && el.$__) {
+ newSubdocs.push(el);
+ }
}
- });
- } else if (isNested && val != null) {
- for (const path of Object.keys(val)) {
- docReducer(val, seed, path);
}
}
- return seed;
+ for (const subdoc of newSubdocs) {
+ getSubdocs(subdoc);
+ }
+ subDocs.push(...newSubdocs);
}
- const subDocs = [];
- for (const path of Object.keys(this._doc)) {
- docReducer(this, subDocs, path);
+ getSubdocs(this);
+
+ if (this.$__.saveOptions) {
+ this.$__.saveOptions.__subdocs = subDocs;
}
return subDocs;
@@ -3856,7 +3821,6 @@ Document.prototype.$toObject = function(options, json) {
// Parent options should only bubble down for subdocuments, not populated docs
options._parentOptions = this.$isSubdocument ? options : null;
- options._skipSingleNestedGetters = false;
// remember the root transform function
// to save it from being overwritten by sub-transform functions
// const originalTransform = options.transform;
@@ -3870,13 +3834,13 @@ Document.prototype.$toObject = function(options, json) {
ret = clone(this._doc, options) || {};
}
- options._skipSingleNestedGetters = true;
const getters = options._calledWithOptions.getters
?? options.getters
?? defaultOptions.getters
?? false;
+
if (getters) {
- applyGetters(this, ret, options);
+ applyGetters(this, ret);
if (options.minimize) {
ret = minimize(ret) || {};
@@ -4187,12 +4151,11 @@ function applyVirtuals(self, json, options, toObjectOptions) {
*
* @param {Document} self
* @param {Object} json
- * @param {Object} [options]
* @return {Object} `json`
* @api private
*/
-function applyGetters(self, json, options) {
+function applyGetters(self, json) {
const schema = self.$__schema;
const paths = Object.keys(schema.paths);
let i = paths.length;
@@ -4228,8 +4191,10 @@ function applyGetters(self, json, options) {
if (branch != null && typeof branch !== 'object') {
break;
} else if (ii === last) {
- const val = self.$get(path);
- branch[part] = clone(val, options);
+ branch[part] = schema.paths[path].applyGetters(
+ branch[part],
+ self
+ );
if (Array.isArray(branch[part]) && schema.paths[path].$embeddedSchemaType) {
for (let i = 0; i < branch[part].length; ++i) {
branch[part][i] = schema.paths[path].$embeddedSchemaType.applyGetters(
diff --git a/lib/drivers/node-mongodb-native/connection.js b/lib/drivers/node-mongodb-native/connection.js
index 6a164bca8b3..641703e4b11 100644
--- a/lib/drivers/node-mongodb-native/connection.js
+++ b/lib/drivers/node-mongodb-native/connection.js
@@ -23,6 +23,11 @@ const utils = require('../../utils');
function NativeConnection() {
MongooseConnection.apply(this, arguments);
this._listening = false;
+ // Tracks the last time (as unix timestamp) the connection received a
+ // serverHeartbeatSucceeded or serverHeartbeatFailed event from the underlying MongoClient.
+ // If we haven't received one in a while (like due to a frozen AWS Lambda container) then
+ // `readyState` is likely stale.
+ this._lastHeartbeatAt = null;
}
/**
@@ -96,7 +101,7 @@ NativeConnection.prototype.useDb = function(name, options) {
if (this.db && this._readyState === STATES.connected) {
wireup();
} else {
- this.once('connected', wireup);
+ this._queue.push({ fn: wireup });
}
function wireup() {
@@ -106,6 +111,7 @@ NativeConnection.prototype.useDb = function(name, options) {
_opts.noListener = options.noListener;
}
newConn.db = _this.client.db(name, _opts);
+ newConn._lastHeartbeatAt = _this._lastHeartbeatAt;
newConn.onOpen();
}
@@ -409,6 +415,9 @@ function _setClient(conn, client, options, dbName) {
}
});
}
+ client.on('serverHeartbeatSucceeded', () => {
+ conn._lastHeartbeatAt = Date.now();
+ });
if (options.monitorCommands) {
client.on('commandStarted', (data) => conn.emit('commandStarted', data));
@@ -417,6 +426,9 @@ function _setClient(conn, client, options, dbName) {
}
conn.onOpen();
+ if (client.topology?.s?.state === 'connected') {
+ conn._lastHeartbeatAt = Date.now();
+ }
for (const i in conn.collections) {
if (utils.object.hasOwnProperty(conn.collections, i)) {
diff --git a/lib/error/browserMissingSchema.js b/lib/error/browserMissingSchema.js
index 608cfd983e4..ffeffc77257 100644
--- a/lib/error/browserMissingSchema.js
+++ b/lib/error/browserMissingSchema.js
@@ -6,11 +6,12 @@
const MongooseError = require('./mongooseError');
+/**
+ * MissingSchema Error constructor.
+ */
class MissingSchemaError extends MongooseError {
- /**
- * MissingSchema Error constructor.
- */
+
constructor() {
super('Schema hasn\'t been registered for document.\n'
+ 'Use mongoose.Document(name, schema)');
diff --git a/lib/error/bulkSaveIncompleteError.js b/lib/error/bulkSaveIncompleteError.js
new file mode 100644
index 00000000000..c4b88e5d7bb
--- /dev/null
+++ b/lib/error/bulkSaveIncompleteError.js
@@ -0,0 +1,44 @@
+/*!
+ * Module dependencies.
+ */
+
+'use strict';
+
+const MongooseError = require('./mongooseError');
+
+
+/**
+ * If the underlying `bulkWrite()` for `bulkSave()` succeeded, but wasn't able to update or
+ * insert all documents, we throw this error.
+ *
+ * @api private
+ */
+
+class MongooseBulkSaveIncompleteError extends MongooseError {
+ constructor(modelName, documents, bulkWriteResult) {
+ const matchedCount = bulkWriteResult?.matchedCount ?? 0;
+ const insertedCount = bulkWriteResult?.insertedCount ?? 0;
+ let preview = documents.map(doc => doc._id).join(', ');
+ if (preview.length > 100) {
+ preview = preview.slice(0, 100) + '...';
+ }
+
+ const numDocumentsNotUpdated = documents.length - matchedCount - insertedCount;
+ super(`${modelName}.bulkSave() was not able to update ${numDocumentsNotUpdated} of the given documents due to incorrect version or optimistic concurrency, document ids: ${preview}`);
+
+ this.modelName = modelName;
+ this.documents = documents;
+ this.bulkWriteResult = bulkWriteResult;
+ this.numDocumentsNotUpdated = numDocumentsNotUpdated;
+ }
+}
+
+Object.defineProperty(MongooseBulkSaveIncompleteError.prototype, 'name', {
+ value: 'MongooseBulkSaveIncompleteError'
+});
+
+/*!
+ * exports
+ */
+
+module.exports = MongooseBulkSaveIncompleteError;
diff --git a/lib/error/divergentArray.js b/lib/error/divergentArray.js
index f266dbde449..bc3f1816264 100644
--- a/lib/error/divergentArray.js
+++ b/lib/error/divergentArray.js
@@ -7,12 +7,14 @@
const MongooseError = require('./mongooseError');
+/**
+ * DivergentArrayError constructor.
+ * @param {Array} paths
+ * @api private
+ */
+
class DivergentArrayError extends MongooseError {
- /**
- * DivergentArrayError constructor.
- * @param {Array} paths
- * @api private
- */
+
constructor(paths) {
const msg = 'For your own good, using `document.save()` to update an array '
+ 'which was selected using an $elemMatch projection OR '
diff --git a/lib/error/invalidSchemaOption.js b/lib/error/invalidSchemaOption.js
index 089dc6a03ef..9e7e4ff4f17 100644
--- a/lib/error/invalidSchemaOption.js
+++ b/lib/error/invalidSchemaOption.js
@@ -7,12 +7,14 @@
const MongooseError = require('./mongooseError');
+/**
+ * InvalidSchemaOption Error constructor.
+ * @param {String} name
+ * @api private
+ */
+
class InvalidSchemaOptionError extends MongooseError {
- /**
- * InvalidSchemaOption Error constructor.
- * @param {String} name
- * @api private
- */
+
constructor(name, option) {
const msg = `Cannot create use schema for property "${name}" because the schema has the ${option} option enabled.`;
super(msg);
diff --git a/lib/error/missingSchema.js b/lib/error/missingSchema.js
index 2b3bf242526..790f7853848 100644
--- a/lib/error/missingSchema.js
+++ b/lib/error/missingSchema.js
@@ -7,12 +7,14 @@
const MongooseError = require('./mongooseError');
+/**
+ * MissingSchema Error constructor.
+ * @param {String} name
+ * @api private
+ */
+
class MissingSchemaError extends MongooseError {
- /**
- * MissingSchema Error constructor.
- * @param {String} name
- * @api private
- */
+
constructor(name) {
const msg = 'Schema hasn\'t been registered for model "' + name + '".\n'
+ 'Use mongoose.model(name, schema)';
diff --git a/lib/error/notFound.js b/lib/error/notFound.js
index 19a22f3a101..87fdd8bc649 100644
--- a/lib/error/notFound.js
+++ b/lib/error/notFound.js
@@ -7,11 +7,13 @@
const MongooseError = require('./mongooseError');
const util = require('util');
+/**
+ * DocumentNotFound Error constructor.
+ * @api private
+ */
+
class DocumentNotFoundError extends MongooseError {
- /**
- * OverwriteModel Error constructor.
- * @api private
- */
+
constructor(filter, model, numAffected, result) {
let msg;
const messages = MongooseError.messages;
diff --git a/lib/error/objectExpected.js b/lib/error/objectExpected.js
index 9f7a8116618..bd89ffc77e1 100644
--- a/lib/error/objectExpected.js
+++ b/lib/error/objectExpected.js
@@ -6,15 +6,16 @@
const MongooseError = require('./mongooseError');
+/**
+ * Strict mode error constructor
+ *
+ * @param {string} path
+ * @param {Any} val
+ * @api private
+ */
class ObjectExpectedError extends MongooseError {
- /**
- * Strict mode error constructor
- *
- * @param {string} type
- * @param {string} value
- * @api private
- */
+
constructor(path, val) {
const typeDescription = Array.isArray(val) ? 'array' : 'primitive value';
super('Tried to set nested object field `' + path +
diff --git a/lib/error/objectParameter.js b/lib/error/objectParameter.js
index b3f5b80849d..0a2108e5c9b 100644
--- a/lib/error/objectParameter.js
+++ b/lib/error/objectParameter.js
@@ -6,16 +6,18 @@
const MongooseError = require('./mongooseError');
+/**
+ * Constructor for errors that happen when a parameter that's expected to be
+ * an object isn't an object
+ *
+ * @param {Any} value
+ * @param {String} paramName
+ * @param {String} fnName
+ * @api private
+ */
+
class ObjectParameterError extends MongooseError {
- /**
- * Constructor for errors that happen when a parameter that's expected to be
- * an object isn't an object
- *
- * @param {Any} value
- * @param {String} paramName
- * @param {String} fnName
- * @api private
- */
+
constructor(value, paramName, fnName) {
super('Parameter "' + paramName + '" to ' + fnName +
'() must be an object, got "' + value.toString() + '" (type ' + typeof value + ')');
diff --git a/lib/error/overwriteModel.js b/lib/error/overwriteModel.js
index 8904e4e74b3..ef828f91731 100644
--- a/lib/error/overwriteModel.js
+++ b/lib/error/overwriteModel.js
@@ -7,13 +7,14 @@
const MongooseError = require('./mongooseError');
+/**
+ * OverwriteModel Error constructor.
+ * @param {String} name
+ * @api private
+ */
class OverwriteModelError extends MongooseError {
- /**
- * OverwriteModel Error constructor.
- * @param {String} name
- * @api private
- */
+
constructor(name) {
super('Cannot overwrite `' + name + '` model once compiled.');
}
diff --git a/lib/error/parallelSave.js b/lib/error/parallelSave.js
index 25e12481d49..fd554fa3bbc 100644
--- a/lib/error/parallelSave.js
+++ b/lib/error/parallelSave.js
@@ -6,13 +6,16 @@
const MongooseError = require('./mongooseError');
+
+/**
+ * ParallelSave Error constructor.
+ *
+ * @param {Document} doc
+ * @api private
+ */
+
class ParallelSaveError extends MongooseError {
- /**
- * ParallelSave Error constructor.
- *
- * @param {Document} doc
- * @api private
- */
+
constructor(doc) {
const msg = 'Can\'t save() the same doc multiple times in parallel. Document: ';
super(msg + doc._doc._id);
diff --git a/lib/error/parallelValidate.js b/lib/error/parallelValidate.js
index 84b7940d6df..d70e296e869 100644
--- a/lib/error/parallelValidate.js
+++ b/lib/error/parallelValidate.js
@@ -7,13 +7,15 @@
const MongooseError = require('./mongooseError');
+/**
+ * ParallelValidate Error constructor.
+ *
+ * @param {Document} doc
+ * @api private
+ */
+
class ParallelValidateError extends MongooseError {
- /**
- * ParallelValidate Error constructor.
- *
- * @param {Document} doc
- * @api private
- */
+
constructor(doc) {
const msg = 'Can\'t validate() the same doc multiple times in parallel. Document: ';
super(msg + doc._doc._id);
diff --git a/lib/error/setOptionError.js b/lib/error/setOptionError.js
index b38a0d30244..369096fd306 100644
--- a/lib/error/setOptionError.js
+++ b/lib/error/setOptionError.js
@@ -8,13 +8,15 @@ const MongooseError = require('./mongooseError');
const util = require('util');
const combinePathErrors = require('../helpers/error/combinePathErrors');
+/**
+ * Mongoose.set Error
+ *
+ * @api private
+ * @inherits MongooseError
+ */
+
class SetOptionError extends MongooseError {
- /**
- * Mongoose.set Error
- *
- * @api private
- * @inherits MongooseError
- */
+
constructor() {
super('');
diff --git a/lib/error/strict.js b/lib/error/strict.js
index 6cf4cf91141..eda7d9ae6f5 100644
--- a/lib/error/strict.js
+++ b/lib/error/strict.js
@@ -6,17 +6,19 @@
const MongooseError = require('./mongooseError');
+/**
+ * Strict mode error constructor
+ *
+ * @param {String} path
+ * @param {String} [msg]
+ * @param {Boolean} [immutable]
+ * @inherits MongooseError
+ * @api private
+ */
+
class StrictModeError extends MongooseError {
- /**
- * Strict mode error constructor
- *
- * @param {String} path
- * @param {String} [msg]
- * @param {Boolean} [immutable]
- * @inherits MongooseError
- * @api private
- */
+
constructor(path, msg, immutable) {
msg = msg || 'Field `' + path + '` is not in schema and strict ' +
'mode is set to throw.';
diff --git a/lib/error/strictPopulate.js b/lib/error/strictPopulate.js
index 288799897bc..d554d71271d 100644
--- a/lib/error/strictPopulate.js
+++ b/lib/error/strictPopulate.js
@@ -6,15 +6,17 @@
const MongooseError = require('./mongooseError');
+/**
+ * Strict mode error constructor
+ *
+ * @param {String} path
+ * @param {String} [msg]
+ * @inherits MongooseError
+ * @api private
+ */
+
class StrictPopulateError extends MongooseError {
- /**
- * Strict mode error constructor
- *
- * @param {String} path
- * @param {String} [msg]
- * @inherits MongooseError
- * @api private
- */
+
constructor(path, msg) {
msg = msg || 'Cannot populate path `' + path + '` because it is not in your schema. ' + 'Set the `strictPopulate` option to false to override.';
super(msg);
diff --git a/lib/error/validation.js b/lib/error/validation.js
index 5e222e980f9..faa4ea799aa 100644
--- a/lib/error/validation.js
+++ b/lib/error/validation.js
@@ -9,14 +9,16 @@ const getConstructorName = require('../helpers/getConstructorName');
const util = require('util');
const combinePathErrors = require('../helpers/error/combinePathErrors');
+/**
+ * Document Validation Error
+ *
+ * @api private
+ * @param {Document} [instance]
+ * @inherits MongooseError
+ */
+
class ValidationError extends MongooseError {
- /**
- * Document Validation Error
- *
- * @api private
- * @param {Document} [instance]
- * @inherits MongooseError
- */
+
constructor(instance) {
let _message;
if (getConstructorName(instance) === 'model') {
diff --git a/lib/error/validator.js b/lib/error/validator.js
index f7ee2ef4761..38f98f0087d 100644
--- a/lib/error/validator.js
+++ b/lib/error/validator.js
@@ -6,15 +6,16 @@
const MongooseError = require('./mongooseError');
+/**
+ * Schema validator error
+ *
+ * @param {Object} properties
+ * @param {Document} doc
+ * @api private
+ */
class ValidatorError extends MongooseError {
- /**
- * Schema validator error
- *
- * @param {Object} properties
- * @param {Document} doc
- * @api private
- */
+
constructor(properties, doc) {
let msg = properties.message;
if (!msg) {
diff --git a/lib/error/version.js b/lib/error/version.js
index 6bc2b5d3af5..4eb8054cdfb 100644
--- a/lib/error/version.js
+++ b/lib/error/version.js
@@ -6,15 +6,17 @@
const MongooseError = require('./mongooseError');
+/**
+ * Version Error constructor.
+ *
+ * @param {Document} doc
+ * @param {Number} currentVersion
+ * @param {Array} modifiedPaths
+ * @api private
+ */
+
class VersionError extends MongooseError {
- /**
- * Version Error constructor.
- *
- * @param {Document} doc
- * @param {Number} currentVersion
- * @param {Array} modifiedPaths
- * @api private
- */
+
constructor(doc, currentVersion, modifiedPaths) {
const modifiedPathsStr = modifiedPaths.join(', ');
super('No matching document found for id "' + doc._doc._id +
diff --git a/lib/helpers/clone.js b/lib/helpers/clone.js
index b0e37f6cfba..09204b8c8a4 100644
--- a/lib/helpers/clone.js
+++ b/lib/helpers/clone.js
@@ -40,11 +40,6 @@ function clone(obj, options, isArrayChild) {
if (isMongooseObject(obj)) {
if (options) {
- // Single nested subdocs should apply getters later in `applyGetters()`
- // when calling `toObject()`. See gh-7442, gh-8295
- if (options._skipSingleNestedGetters && obj.$isSingleNested) {
- options._calledWithOptions = Object.assign({}, options._calledWithOptions || {}, { getters: false });
- }
if (options.retainDocuments && obj.$__ != null) {
const clonedDoc = obj.$clone();
if (obj.__index != null) {
diff --git a/lib/helpers/document/applyTimestamps.js b/lib/helpers/document/applyTimestamps.js
new file mode 100644
index 00000000000..425e144c867
--- /dev/null
+++ b/lib/helpers/document/applyTimestamps.js
@@ -0,0 +1,105 @@
+'use strict';
+
+const handleTimestampOption = require('../schema/handleTimestampOption');
+const mpath = require('mpath');
+
+module.exports = applyTimestamps;
+
+/**
+ * Apply a given schema's timestamps to the given POJO
+ *
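+ * #### Example (illustrative sketch):
+ *
+ *     // assuming `schema` was created with `{ timestamps: true }`
+ *     const obj = { name: 'test' };
+ *     applyTimestamps(schema, obj);
+ *     obj.createdAt; // current date
+ *     obj.updatedAt; // current date
+ *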
+ * @param {Schema} schema
+ * @param {Object} obj
+ * @param {Object} [options]
+ * @param {Boolean} [options.isUpdate=false] if true, treat this as an update: just set updatedAt, skip setting createdAt. If false, set both createdAt and updatedAt
+ * @param {Function} [options.currentTime] if set, Mongoose will call this function to get the current time.
+ */
+
+function applyTimestamps(schema, obj, options) {
+ if (obj == null) {
+ return obj;
+ }
+
+ applyTimestampsToChildren(schema, obj, options);
+ return applyTimestampsToDoc(schema, obj, options);
+}
+
+/**
+ * Apply timestamps to any subdocuments
+ *
+ * @param {Schema} schema subdocument schema
+ * @param {Object} res subdocument
+ * @param {Object} [options]
+ * @param {Boolean} [options.isUpdate=false] if true, treat this as an update: just set updatedAt, skip setting createdAt. If false, set both createdAt and updatedAt
+ * @param {Function} [options.currentTime] if set, Mongoose will call this function to get the current time.
+ */
+
+function applyTimestampsToChildren(schema, res, options) {
+ for (const childSchema of schema.childSchemas) {
+ const _path = childSchema.model.path;
+ const _schema = childSchema.schema;
+ if (!_path) {
+ continue;
+ }
+ const _obj = mpath.get(_path, res);
+ if (_obj == null || (Array.isArray(_obj) && _obj.flat(Infinity).length === 0)) {
+ continue;
+ }
+
+ applyTimestamps(_schema, _obj, options);
+ }
+}
+
+/**
+ * Apply timestamps to a given document. Does not apply timestamps to subdocuments: use `applyTimestampsToChildren` instead
+ *
+ * @param {Schema} schema
+ * @param {Object} obj
+ * @param {Object} [options]
+ * @param {Boolean} [options.isUpdate=false] if true, treat this as an update: just set updatedAt, skip setting createdAt. If false, set both createdAt and updatedAt
+ * @param {Function} [options.currentTime] if set, Mongoose will call this function to get the current time.
+ */
+
+function applyTimestampsToDoc(schema, obj, options) {
+ if (obj == null || typeof obj !== 'object') {
+ return;
+ }
+ if (Array.isArray(obj)) {
+ for (const el of obj) {
+ applyTimestampsToDoc(schema, el, options);
+ }
+ return;
+ }
+
+ if (schema.discriminators && Object.keys(schema.discriminators).length > 0) {
+ for (const discriminatorKey of Object.keys(schema.discriminators)) {
+ const discriminator = schema.discriminators[discriminatorKey];
+ const key = discriminator.discriminatorMapping.key;
+ const value = discriminator.discriminatorMapping.value;
+ if (obj[key] == value) {
+ schema = discriminator;
+ break;
+ }
+ }
+ }
+
+ const createdAt = handleTimestampOption(schema.options.timestamps, 'createdAt');
+ const updatedAt = handleTimestampOption(schema.options.timestamps, 'updatedAt');
+ const currentTime = options?.currentTime;
+
+ let ts = null;
+ if (currentTime != null) {
+ ts = currentTime();
+ } else if (schema.base?.now) {
+ ts = schema.base.now();
+ } else {
+ ts = new Date();
+ }
+
+ if (createdAt && obj[createdAt] == null && !options?.isUpdate) {
+ obj[createdAt] = ts;
+ }
+ if (updatedAt) {
+ obj[updatedAt] = ts;
+ }
+}
diff --git a/lib/helpers/document/applyVirtuals.js b/lib/helpers/document/applyVirtuals.js
new file mode 100644
index 00000000000..5fbe7ca82ba
--- /dev/null
+++ b/lib/helpers/document/applyVirtuals.js
@@ -0,0 +1,146 @@
+'use strict';
+
+const mpath = require('mpath');
+
+module.exports = applyVirtuals;
+
+/**
+ * Apply a given schema's virtuals to a given POJO
+ *
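+ * #### Example (illustrative sketch):
+ *
+ *     // apply every virtual defined on `schema` and its child schemas
+ *     applyVirtuals(schema, obj);
+ *
+ *     // apply only the top-level `fullName` virtual and the `fullName` virtual
+ *     // of the child schema at path `friend`
+ *     applyVirtuals(schema, obj, [['fullName'], ['friend', 'fullName']]);
+ *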
+ * @param {Schema} schema
+ * @param {Object} obj
+ * @param {Array} [virtuals] optional whitelist of virtuals to apply
+ * @returns
+ */
+
+function applyVirtuals(schema, obj, virtuals) {
+ if (obj == null) {
+ return obj;
+ }
+
+ let virtualsForChildren = virtuals;
+ let toApply = null;
+
+ if (Array.isArray(virtuals)) {
+ virtualsForChildren = [];
+ toApply = [];
+ for (const virtual of virtuals) {
+ if (virtual.length === 1) {
+ toApply.push(virtual[0]);
+ } else {
+ virtualsForChildren.push(virtual);
+ }
+ }
+ }
+
+ applyVirtualsToChildren(schema, obj, virtualsForChildren);
+ return applyVirtualsToDoc(schema, obj, toApply);
+}
+
+/**
+ * Apply virtuals to any subdocuments
+ *
+ * @param {Schema} schema subdocument schema
+ * @param {Object} res subdocument
+ * @param {Array} [virtuals] optional whitelist of virtuals to apply
+ */
+
+function applyVirtualsToChildren(schema, res, virtuals) {
+ let attachedVirtuals = false;
+ for (const childSchema of schema.childSchemas) {
+ const _path = childSchema.model.path;
+ const _schema = childSchema.schema;
+ if (!_path) {
+ continue;
+ }
+ const _obj = mpath.get(_path, res);
+ if (_obj == null || (Array.isArray(_obj) && _obj.flat(Infinity).length === 0)) {
+ continue;
+ }
+
+ let virtualsForChild = null;
+ if (Array.isArray(virtuals)) {
+ virtualsForChild = [];
+ for (const virtual of virtuals) {
+ if (virtual[0] == _path) {
+ virtualsForChild.push(virtual.slice(1));
+ }
+ }
+
+ if (virtualsForChild.length === 0) {
+ continue;
+ }
+ }
+
+ applyVirtuals(_schema, _obj, virtualsForChild);
+ attachedVirtuals = true;
+ }
+
+ if (virtuals && virtuals.length && !attachedVirtuals) {
+ applyVirtualsToDoc(schema, res, virtuals);
+ }
+}
+
+/**
+ * Apply virtuals to a given document. Does not apply virtuals to subdocuments: use `applyVirtualsToChildren` instead
+ *
+ * @param {Schema} schema
+ * @param {Object} doc
+ * @param {Array} [virtuals] optional whitelist of virtuals to apply
+ * @returns
+ */
+
+function applyVirtualsToDoc(schema, obj, virtuals) {
+ if (obj == null || typeof obj !== 'object') {
+ return;
+ }
+ if (Array.isArray(obj)) {
+ for (const el of obj) {
+ applyVirtualsToDoc(schema, el, virtuals);
+ }
+ return;
+ }
+
+ if (schema.discriminators && Object.keys(schema.discriminators).length > 0) {
+ for (const discriminatorKey of Object.keys(schema.discriminators)) {
+ const discriminator = schema.discriminators[discriminatorKey];
+ const key = discriminator.discriminatorMapping.key;
+ const value = discriminator.discriminatorMapping.value;
+ if (obj[key] == value) {
+ schema = discriminator;
+ break;
+ }
+ }
+ }
+
+ if (virtuals == null) {
+ virtuals = Object.keys(schema.virtuals);
+ }
+ for (const virtual of virtuals) {
+ if (schema.virtuals[virtual] == null) {
+ continue;
+ }
+ const virtualType = schema.virtuals[virtual];
+ const sp = Array.isArray(virtual)
+ ? virtual
+ : virtual.indexOf('.') === -1
+ ? [virtual]
+ : virtual.split('.');
+ let cur = obj;
+ for (let i = 0; i < sp.length - 1; ++i) {
+ cur[sp[i]] = sp[i] in cur ? cur[sp[i]] : {};
+ cur = cur[sp[i]];
+ }
+ let val = virtualType.applyGetters(cur[sp[sp.length - 1]], obj);
+ const isPopulateVirtual =
+ virtualType.options && (virtualType.options.ref || virtualType.options.refPath);
+ if (isPopulateVirtual && val === undefined) {
+ if (virtualType.options.justOne) {
+ val = null;
+ } else {
+ val = [];
+ }
+ }
+ cur[sp[sp.length - 1]] = val;
+ }
+}
diff --git a/lib/helpers/document/cleanModifiedSubpaths.js b/lib/helpers/document/cleanModifiedSubpaths.js
index 43c225e4fd2..c12b5e2eea5 100644
--- a/lib/helpers/document/cleanModifiedSubpaths.js
+++ b/lib/helpers/document/cleanModifiedSubpaths.js
@@ -25,11 +25,21 @@ module.exports = function cleanModifiedSubpaths(doc, path, options) {
++deleted;
if (doc.$isSubdocument) {
- const owner = doc.ownerDocument();
- const fullPath = doc.$__fullPath(modifiedPath);
- owner.$__.activePaths.clearPath(fullPath);
+ cleanParent(doc, modifiedPath);
}
}
}
return deleted;
};
+
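+/**
+ * Clear the modified-path state for `path` on every ancestor of the given
+ * subdocument, walking up the parent chain and erroring out on circular
+ * parent references.
+ */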
+function cleanParent(doc, path, seen = new Set()) {
+  if (seen.has(doc)) {
+    throw new Error('Infinite subdocument loop: subdoc with _id ' + doc._id + ' is a parent of itself');
+  }
+  seen.add(doc);
+  const parent = doc.$parent();
+ const newPath = doc.$__pathRelativeToParent(void 0, false) + '.' + path;
+ parent.$__.activePaths.clearPath(newPath);
+ if (parent.$isSubdocument) {
+ cleanParent(parent, newPath, seen);
+ }
+}
diff --git a/lib/helpers/indexes/isTimeseriesIndex.js b/lib/helpers/indexes/isTimeseriesIndex.js
new file mode 100644
index 00000000000..0a4512b91ed
--- /dev/null
+++ b/lib/helpers/indexes/isTimeseriesIndex.js
@@ -0,0 +1,16 @@
+'use strict';
+
+/**
+ * Returns `true` if the given index matches the schema's `timeseries` options
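+ *
+ * #### Example (illustrative):
+ *
+ *     isTimeseriesIndex(
+ *       { name: 'timestamp_1_metadata_1', key: { timestamp: 1, metadata: 1 } },
+ *       { timeseries: { timeField: 'timestamp', metaField: 'metadata' } }
+ *     ); // true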
+ */
+
+module.exports = function isTimeseriesIndex(dbIndex, schemaOptions) {
+ if (schemaOptions.timeseries == null) {
+ return false;
+ }
+ const { timeField, metaField } = schemaOptions.timeseries;
+ if (typeof timeField !== 'string' || typeof metaField !== 'string') {
+ return false;
+ }
+ return Object.keys(dbIndex.key).length === 2 && dbIndex.key[timeField] === 1 && dbIndex.key[metaField] === 1;
+};
diff --git a/lib/helpers/isBsonType.js b/lib/helpers/isBsonType.js
index f75fd40169d..ab6ceba58e6 100644
--- a/lib/helpers/isBsonType.js
+++ b/lib/helpers/isBsonType.js
@@ -7,8 +7,7 @@
function isBsonType(obj, typename) {
return (
- typeof obj === 'object' &&
- obj !== null &&
+ obj != null &&
obj._bsontype === typename
);
}
diff --git a/lib/helpers/model/applyStaticHooks.js b/lib/helpers/model/applyStaticHooks.js
index 957e94f2288..40116462f26 100644
--- a/lib/helpers/model/applyStaticHooks.js
+++ b/lib/helpers/model/applyStaticHooks.js
@@ -1,7 +1,16 @@
'use strict';
-const middlewareFunctions = require('../../constants').queryMiddlewareFunctions;
const promiseOrCallback = require('../promiseOrCallback');
+const { queryMiddlewareFunctions, aggregateMiddlewareFunctions, modelMiddlewareFunctions, documentMiddlewareFunctions } = require('../../constants');
+
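+// All built-in middleware function names (query, aggregate, model, and document),
+// deduplicated; used to detect custom statics that overwrite built-in functions.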
+const middlewareFunctions = Array.from(
+ new Set([
+ ...queryMiddlewareFunctions,
+ ...aggregateMiddlewareFunctions,
+ ...modelMiddlewareFunctions,
+ ...documentMiddlewareFunctions
+ ])
+);
module.exports = function applyStaticHooks(model, hooks, statics) {
const kareemOptions = {
@@ -9,8 +18,11 @@ module.exports = function applyStaticHooks(model, hooks, statics) {
numCallbackParams: 1
};
+ model.$__insertMany = hooks.createWrapper('insertMany',
+ model.$__insertMany, model, kareemOptions);
+
hooks = hooks.filter(hook => {
- // If the custom static overwrites an existing query middleware, don't apply
+ // If the custom static overwrites an existing middleware, don't apply
// middleware to it by default. This avoids a potential backwards breaking
// change with plugins like `mongoose-delete` that use statics to overwrite
// built-in Mongoose functions.
@@ -20,9 +32,6 @@ module.exports = function applyStaticHooks(model, hooks, statics) {
return hook.model !== false;
});
- model.$__insertMany = hooks.createWrapper('insertMany',
- model.$__insertMany, model, kareemOptions);
-
for (const key of Object.keys(statics)) {
if (hooks.hasHooks(key)) {
const original = model[key];
diff --git a/lib/helpers/model/castBulkWrite.js b/lib/helpers/model/castBulkWrite.js
index 1afb36987fa..6d7a780a812 100644
--- a/lib/helpers/model/castBulkWrite.js
+++ b/lib/helpers/model/castBulkWrite.js
@@ -103,7 +103,9 @@ module.exports = function castBulkWrite(originalModel, op, options) {
});
op['updateOne']['update'] = castUpdate(model.schema, update, {
strict: strict,
- upsert: op['updateOne'].upsert
+ upsert: op['updateOne'].upsert,
+ arrayFilters: op['updateOne'].arrayFilters,
+ overwriteDiscriminatorKey: op['updateOne'].overwriteDiscriminatorKey
}, model, op['updateOne']['filter']);
} catch (error) {
return callback(error, null);
@@ -162,7 +164,9 @@ module.exports = function castBulkWrite(originalModel, op, options) {
op['updateMany']['update'] = castUpdate(model.schema, op['updateMany']['update'], {
strict: strict,
- upsert: op['updateMany'].upsert
+ upsert: op['updateMany'].upsert,
+ arrayFilters: op['updateMany'].arrayFilters,
+ overwriteDiscriminatorKey: op['updateMany'].overwriteDiscriminatorKey
}, model, op['updateMany']['filter']);
} catch (error) {
return callback(error, null);
diff --git a/lib/helpers/populate/assignVals.js b/lib/helpers/populate/assignVals.js
index 9aff29fd538..62b3863b583 100644
--- a/lib/helpers/populate/assignVals.js
+++ b/lib/helpers/populate/assignVals.js
@@ -249,7 +249,7 @@ function numDocs(v) {
function valueFilter(val, assignmentOpts, populateOptions, allIds) {
const userSpecifiedTransform = typeof populateOptions.transform === 'function';
- const transform = userSpecifiedTransform ? populateOptions.transform : noop;
+ const transform = userSpecifiedTransform ? populateOptions.transform : v => v;
if (Array.isArray(val)) {
// find logic
const ret = [];
@@ -341,7 +341,3 @@ function isPopulatedObject(obj) {
obj.$__ != null ||
leanPopulateMap.has(obj);
}
-
-function noop(v) {
- return v;
-}
diff --git a/lib/helpers/populate/getModelsMapForPopulate.js b/lib/helpers/populate/getModelsMapForPopulate.js
index 2b2a2b40312..bd748ed0722 100644
--- a/lib/helpers/populate/getModelsMapForPopulate.js
+++ b/lib/helpers/populate/getModelsMapForPopulate.js
@@ -184,6 +184,15 @@ module.exports = function getModelsMapForPopulate(model, docs, options) {
if (hasMatchFunction) {
match = match.call(doc, doc);
}
+ if (Array.isArray(match)) {
+ for (const item of match) {
+ if (item != null && item.$where) {
+ throw new MongooseError('Cannot use $where filter with populate() match');
+ }
+ }
+ } else if (match != null && match.$where != null) {
+ throw new MongooseError('Cannot use $where filter with populate() match');
+ }
data.match = match;
data.hasMatchFunction = hasMatchFunction;
data.isRefPath = isRefPath;
@@ -447,6 +456,16 @@ function _virtualPopulate(model, docs, options, _virtualRes) {
data.match = match;
data.hasMatchFunction = hasMatchFunction;
+ if (Array.isArray(match)) {
+ for (const item of match) {
+ if (item != null && item.$where) {
+ throw new MongooseError('Cannot use $where filter with populate() match');
+ }
+ }
+ } else if (match != null && match.$where != null) {
+ throw new MongooseError('Cannot use $where filter with populate() match');
+ }
+
// Get local fields
const ret = _getLocalFieldValues(doc, localField, model, options, virtual);
@@ -478,9 +497,10 @@ function addModelNamesToMap(model, map, available, modelNames, options, data, re
return;
}
- let k = modelNames.length;
+ const flatModelNames = utils.array.flatten(modelNames);
+ let k = flatModelNames.length;
while (k--) {
- let modelName = modelNames[k];
+ let modelName = flatModelNames[k];
if (modelName == null) {
continue;
}
@@ -503,11 +523,10 @@ function addModelNamesToMap(model, map, available, modelNames, options, data, re
}
let ids = ret;
- const flat = Array.isArray(ret) ? utils.array.flatten(ret) : [];
const modelNamesForRefPath = data.modelNamesInOrder ? data.modelNamesInOrder : modelNames;
- if (data.isRefPath && Array.isArray(ret) && flat.length === modelNamesForRefPath.length) {
- ids = flat.filter((val, i) => modelNamesForRefPath[i] === modelName);
+ if (data.isRefPath && Array.isArray(ret) && ret.length === modelNamesForRefPath.length) {
+ ids = matchIdsToRefPaths(ret, modelNamesForRefPath, modelName);
}
const perDocumentLimit = options.perDocumentLimit == null ?
@@ -569,6 +588,20 @@ function _getModelFromConn(conn, modelName) {
return conn.model(modelName);
}
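+
+/**
+ * Walk a (possibly nested) array of ids alongside the parallel array of refPath-resolved
+ * model names, and return a flat array of the ids whose model name is `refPathToFind`.
+ */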
+function matchIdsToRefPaths(ids, refPaths, refPathToFind) {
+ if (!Array.isArray(refPaths)) {
+ return refPaths === refPathToFind
+ ? Array.isArray(ids)
+ ? utils.array.flatten(ids)
+ : [ids]
+ : [];
+ }
+ if (Array.isArray(ids) && Array.isArray(refPaths)) {
+ return ids.flatMap((id, index) => matchIdsToRefPaths(id, refPaths[index], refPathToFind));
+ }
+ return [];
+}
+
/*!
* ignore
*/
diff --git a/lib/helpers/populate/modelNamesFromRefPath.js b/lib/helpers/populate/modelNamesFromRefPath.js
index df643b234ae..a5b02859346 100644
--- a/lib/helpers/populate/modelNamesFromRefPath.js
+++ b/lib/helpers/populate/modelNamesFromRefPath.js
@@ -62,7 +62,5 @@ module.exports = function modelNamesFromRefPath(refPath, doc, populatedPath, mod
modelNames = Array.isArray(refValue) ? refValue : [refValue];
}
- modelNames = utils.array.flatten(modelNames);
-
return modelNames;
};
diff --git a/lib/helpers/projection/isExclusive.js b/lib/helpers/projection/isExclusive.js
index b55cf468458..e6ca3cad5ec 100644
--- a/lib/helpers/projection/isExclusive.js
+++ b/lib/helpers/projection/isExclusive.js
@@ -1,6 +1,7 @@
'use strict';
const isDefiningProjection = require('./isDefiningProjection');
+const isPOJO = require('../isPOJO');
/*!
* ignore
@@ -22,10 +23,12 @@ module.exports = function isExclusive(projection) {
// Explicitly avoid `$meta` and `$slice`
const key = keys[ki];
if (key !== '_id' && isDefiningProjection(projection[key])) {
- exclude = (projection[key] != null && typeof projection[key] === 'object') ?
- isExclusive(projection[key]) :
+ exclude = isPOJO(projection[key]) ?
+ (isExclusive(projection[key]) ?? exclude) :
!projection[key];
- break;
+ if (exclude != null) {
+ break;
+ }
}
}
}
diff --git a/lib/helpers/projection/isInclusive.js b/lib/helpers/projection/isInclusive.js
index eebb412c4a3..c53bac02873 100644
--- a/lib/helpers/projection/isInclusive.js
+++ b/lib/helpers/projection/isInclusive.js
@@ -1,6 +1,7 @@
'use strict';
const isDefiningProjection = require('./isDefiningProjection');
+const isPOJO = require('../isPOJO');
/*!
* ignore
@@ -26,7 +27,7 @@ module.exports = function isInclusive(projection) {
// If field is truthy (1, true, etc.) and not an object, then this
// projection must be inclusive. If object, assume its $meta, $slice, etc.
if (isDefiningProjection(projection[prop]) && !!projection[prop]) {
- if (projection[prop] != null && typeof projection[prop] === 'object') {
+ if (isPOJO(projection[prop])) {
return isInclusive(projection[prop]);
} else {
return !!projection[prop];
diff --git a/lib/helpers/query/castUpdate.js b/lib/helpers/query/castUpdate.js
index eb69bc89a09..3cf30cb0e17 100644
--- a/lib/helpers/query/castUpdate.js
+++ b/lib/helpers/query/castUpdate.js
@@ -2,11 +2,13 @@
const CastError = require('../../error/cast');
const MongooseError = require('../../error/mongooseError');
+const SchemaString = require('../../schema/string');
const StrictModeError = require('../../error/strict');
const ValidationError = require('../../error/validation');
const castNumber = require('../../cast/number');
const cast = require('../../cast');
const getConstructorName = require('../getConstructorName');
+const getDiscriminatorByValue = require('../discriminator/getDiscriminatorByValue');
const getEmbeddedDiscriminatorPath = require('./getEmbeddedDiscriminatorPath');
const handleImmutable = require('./handleImmutable');
const moveImmutableProperties = require('../update/moveImmutableProperties');
@@ -61,6 +63,27 @@ module.exports = function castUpdate(schema, obj, options, context, filter) {
return obj;
}
+ if (schema != null &&
+ filter != null &&
+ utils.hasUserDefinedProperty(filter, schema.options.discriminatorKey) &&
+ typeof filter[schema.options.discriminatorKey] !== 'object' &&
+ schema.discriminators != null) {
+ const discriminatorValue = filter[schema.options.discriminatorKey];
+ const byValue = getDiscriminatorByValue(context.model.discriminators, discriminatorValue);
+ schema = schema.discriminators[discriminatorValue] ||
+ (byValue && byValue.schema) ||
+ schema;
+ } else if (schema != null &&
+ options.overwriteDiscriminatorKey &&
+ utils.hasUserDefinedProperty(obj, schema.options.discriminatorKey) &&
+ schema.discriminators != null) {
+ const discriminatorValue = obj[schema.options.discriminatorKey];
+ const byValue = getDiscriminatorByValue(context.model.discriminators, discriminatorValue);
+ schema = schema.discriminators[discriminatorValue] ||
+ (byValue && byValue.schema) ||
+ schema;
+ }
+
if (options.upsert) {
moveImmutableProperties(schema, obj, context);
}
@@ -244,7 +267,7 @@ function walkUpdatePath(schema, obj, op, options, context, filter, pref) {
}
if (op !== '$setOnInsert' &&
- handleImmutable(schematype, strict, obj, key, prefix + key, context)) {
+ handleImmutable(schematype, strict, obj, key, prefix + key, options, context)) {
continue;
}
@@ -307,6 +330,20 @@ function walkUpdatePath(schema, obj, op, options, context, filter, pref) {
continue;
}
+ hasKeys = true;
+ } else if (op === '$rename') {
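+      // The value of a `$rename` entry is the new field name, so cast it as a string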
+ const schematype = new SchemaString(`${prefix}${key}.$rename`);
+ try {
+ obj[key] = castUpdateVal(schematype, val, op, key, context, prefix + key);
+ } catch (error) {
+ aggregatedError = _appendError(error, context, key, aggregatedError);
+ }
+
+ if (obj[key] === void 0) {
+ delete obj[key];
+ continue;
+ }
+
hasKeys = true;
} else {
const pathToCheck = (prefix + key);
@@ -338,7 +375,7 @@ function walkUpdatePath(schema, obj, op, options, context, filter, pref) {
// You can use `$setOnInsert` with immutable keys
if (op !== '$setOnInsert' &&
- handleImmutable(schematype, strict, obj, key, prefix + key, context)) {
+ handleImmutable(schematype, strict, obj, key, prefix + key, options, context)) {
continue;
}
@@ -372,10 +409,12 @@ function walkUpdatePath(schema, obj, op, options, context, filter, pref) {
delete obj[key];
}
} else {
- // gh-1845 temporary fix: ignore $rename. See gh-3027 for tracking
- // improving this.
if (op === '$rename') {
- hasKeys = true;
+ if (obj[key] == null) {
+ throw new CastError('String', obj[key], `${prefix}${key}.$rename`);
+ }
+ const schematype = new SchemaString(`${prefix}${key}.$rename`);
+ obj[key] = schematype.castForQuery(null, obj[key], context);
continue;
}
diff --git a/lib/helpers/query/handleImmutable.js b/lib/helpers/query/handleImmutable.js
index 22adb3c50de..0102db42a46 100644
--- a/lib/helpers/query/handleImmutable.js
+++ b/lib/helpers/query/handleImmutable.js
@@ -2,7 +2,20 @@
const StrictModeError = require('../../error/strict');
-module.exports = function handleImmutable(schematype, strict, obj, key, fullPath, ctx) {
+/**
+ * Handle immutable option for a given path when casting updates based on options
+ *
+ * @param {SchemaType} schematype the resolved schematype for this path
+ * @param {Boolean | 'throw' | null} strict whether strict mode is set for this query
+ * @param {Object} obj the object containing the value being checked so we can delete
+ * @param {String} key the key in `obj` which we are checking for immutability
+ * @param {String} fullPath the full path being checked
+ * @param {Object} options the query options
+ * @param {Query} ctx the query. Passed as `this` and first param to the `immutable` option, if `immutable` is a function
+ * @returns true if field was removed, false otherwise
+ */
+
+module.exports = function handleImmutable(schematype, strict, obj, key, fullPath, options, ctx) {
if (schematype == null || !schematype.options || !schematype.options.immutable) {
return false;
}
@@ -15,6 +28,9 @@ module.exports = function handleImmutable(schematype, strict, obj, key, fullPath
return false;
}
+ if (options && options.overwriteImmutable) {
+ return false;
+ }
if (strict === false) {
return false;
}
diff --git a/lib/helpers/schema/applyReadConcern.js b/lib/helpers/schema/applyReadConcern.js
index 80d4da6eb20..050fa9c6df0 100644
--- a/lib/helpers/schema/applyReadConcern.js
+++ b/lib/helpers/schema/applyReadConcern.js
@@ -1,7 +1,5 @@
'use strict';
-const get = require('../get');
-
module.exports = function applyReadConcern(schema, options) {
if (options.readConcern !== undefined) {
return;
@@ -15,7 +13,7 @@ module.exports = function applyReadConcern(schema, options) {
return;
}
- const level = get(schema, 'options.readConcern.level', null);
+ const level = schema.options?.readConcern?.level;
if (level != null) {
options.readConcern = { level };
}
diff --git a/lib/helpers/schema/applyWriteConcern.js b/lib/helpers/schema/applyWriteConcern.js
index 27098110872..28338cf58c3 100644
--- a/lib/helpers/schema/applyWriteConcern.js
+++ b/lib/helpers/schema/applyWriteConcern.js
@@ -1,7 +1,5 @@
'use strict';
-const get = require('../get');
-
module.exports = function applyWriteConcern(schema, options) {
if (options.writeConcern != null) {
return;
@@ -12,7 +10,7 @@ module.exports = function applyWriteConcern(schema, options) {
if (options && options.session && options.session.transaction) {
return;
}
- const writeConcern = get(schema, 'options.writeConcern', {});
+ const writeConcern = schema.options.writeConcern ?? {};
if (Object.keys(writeConcern).length != 0) {
options.writeConcern = {};
if (!('w' in options) && writeConcern.w != null) {
diff --git a/lib/model.js b/lib/model.js
index e83a61be4ff..45c273a221f 100644
--- a/lib/model.js
+++ b/lib/model.js
@@ -10,6 +10,7 @@ const Document = require('./document');
const DocumentNotFoundError = require('./error/notFound');
const EventEmitter = require('events').EventEmitter;
const Kareem = require('kareem');
+const { MongoBulkWriteError } = require('mongodb');
const MongooseBulkWriteError = require('./error/bulkWriteError');
const MongooseError = require('./error/index');
const ObjectParameterError = require('./error/objectParameter');
@@ -30,7 +31,9 @@ const applyReadConcern = require('./helpers/schema/applyReadConcern');
const applySchemaCollation = require('./helpers/indexes/applySchemaCollation');
const applyStaticHooks = require('./helpers/model/applyStaticHooks');
const applyStatics = require('./helpers/model/applyStatics');
+const applyTimestampsHelper = require('./helpers/document/applyTimestamps');
const applyWriteConcern = require('./helpers/schema/applyWriteConcern');
+const applyVirtualsHelper = require('./helpers/document/applyVirtuals');
const assignVals = require('./helpers/populate/assignVals');
const castBulkWrite = require('./helpers/model/castBulkWrite');
const clone = require('./helpers/clone');
@@ -48,6 +51,7 @@ const immediate = require('./helpers/immediate');
const internalToObjectOptions = require('./options').internalToObjectOptions;
const isDefaultIdIndex = require('./helpers/indexes/isDefaultIdIndex');
const isIndexEqual = require('./helpers/indexes/isIndexEqual');
+const isTimeseriesIndex = require('./helpers/indexes/isTimeseriesIndex');
const {
getRelatedDBIndexes,
getRelatedSchemaIndexes
@@ -64,6 +68,7 @@ const STATES = require('./connectionState');
const util = require('util');
const utils = require('./utils');
const minimize = require('./helpers/minimize');
+const MongooseBulkSaveIncompleteError = require('./error/bulkSaveIncompleteError');
const modelCollectionSymbol = Symbol('mongoose#Model#collection');
const modelDbSymbol = Symbol('mongoose#Model#db');
@@ -73,8 +78,7 @@ const subclassedSymbol = Symbol('mongoose#Model#subclassed');
const { VERSION_INC, VERSION_WHERE, VERSION_ALL } = Document;
const saveToObjectOptions = Object.assign({}, internalToObjectOptions, {
- bson: true,
- flattenObjectIds: false
+ bson: true
});
/**
@@ -1218,6 +1222,7 @@ Model.createCollection = async function createCollection(options) {
*
* @param {Object} [options] options to pass to `ensureIndexes()`
* @param {Boolean} [options.background=null] if specified, overrides each index's `background` property
+ * @param {Boolean} [options.hideIndexes=false] set to `true` to hide indexes instead of dropping. Requires MongoDB server 4.4 or higher
* @return {Promise}
* @api public
*/
@@ -1414,6 +1419,10 @@ function getIndexesToDrop(schema, schemaIndexes, dbIndexes) {
if (isDefaultIdIndex(dbIndex)) {
continue;
}
+ // Timeseries collections have a default index on { timeField: 1, metaField: 1 }.
+ if (isTimeseriesIndex(dbIndex, schema.options)) {
+ continue;
+ }
for (const [schemaIndexKeysObject, schemaIndexOptions] of schemaIndexes) {
const options = decorateDiscriminatorIndexOptions(schema, clone(schemaIndexOptions));
@@ -1425,9 +1434,11 @@ function getIndexesToDrop(schema, schemaIndexes, dbIndexes) {
}
}
- if (!found) {
- toDrop.push(dbIndex.name);
+ if (found) {
+ continue;
}
+
+ toDrop.push(dbIndex.name);
}
return toDrop;
@@ -1438,8 +1449,10 @@ function getIndexesToDrop(schema, schemaIndexes, dbIndexes) {
*
* The returned promise resolves to a list of the dropped indexes' names as an array
*
- * @param {Function} [callback] optional callback
- * @return {Promise|undefined} Returns `undefined` if callback is specified, returns a promise if no callback.
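+ * #### Example (illustrative; `Model` stands for any compiled model):
+ *
+ *     // Hide, rather than drop, indexes that are in the collection but not in the
+ *     // schema. Requires MongoDB server 4.4 or higher.
+ *     const removedIndexes = await Model.cleanIndexes({ hideIndexes: true });
+ *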
+ * @param {Object} [options]
+ * @param {Array} [options.toDrop] if specified, contains a list of index names to drop
+ * @param {Boolean} [options.hideIndexes=false] set to `true` to hide indexes instead of dropping. Requires MongoDB server 4.4 or higher
+ * @return {Promise} list of dropped or hidden index names
* @api public
*/
@@ -1450,23 +1463,32 @@ Model.cleanIndexes = async function cleanIndexes(options) {
}
const model = this;
- const collection = model.$__collection;
-
if (Array.isArray(options && options.toDrop)) {
- const res = await _dropIndexes(options.toDrop, collection);
+ const res = await _dropIndexes(options.toDrop, model, options);
return res;
}
const res = await model.diffIndexes();
- return await _dropIndexes(res.toDrop, collection);
+ return await _dropIndexes(res.toDrop, model, options);
};
-async function _dropIndexes(toDrop, collection) {
+async function _dropIndexes(toDrop, model, options) {
if (toDrop.length === 0) {
return [];
}
- await Promise.all(toDrop.map(indexName => collection.dropIndex(indexName)));
+ const collection = model.$__collection;
+ if (options && options.hideIndexes) {
+ await Promise.all(toDrop.map(indexName => {
+ return model.db.db.command({
+ collMod: collection.collectionName,
+ index: { name: indexName, hidden: true }
+ });
+ }));
+ } else {
+ await Promise.all(toDrop.map(indexName => collection.dropIndex(indexName)));
+ }
+
return toDrop;
}
@@ -1652,7 +1674,24 @@ function _ensureIndexes(model, options, callback) {
}
}
- model.collection.createIndex(indexFields, indexOptions).then(
+ // Just in case `createIndex()` throws a sync error
+ let promise = null;
+ try {
+ promise = model.collection.createIndex(indexFields, indexOptions);
+ } catch (err) {
+ if (!indexError) {
+ indexError = err;
+ }
+ if (!model.$caught) {
+ model.emit('error', err);
+ }
+
+ indexSingleDone(err, indexFields, indexOptions);
+ create();
+ return;
+ }
+
+ promise.then(
name => {
indexSingleDone(null, indexFields, indexOptions, name);
create();
@@ -3107,7 +3146,7 @@ function _setIsNew(doc, val) {
doc.$emit('isNew', val);
doc.constructor.emit('isNew', val);
- const subdocs = doc.$getAllSubdocs();
+ const subdocs = doc.$getAllSubdocs({ useCache: true });
for (const subdoc of subdocs) {
subdoc.$isNew = val;
subdoc.$emit('isNew', val);
@@ -3368,11 +3407,19 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
};
/**
- * takes an array of documents, gets the changes and inserts/updates documents in the database
- * according to whether or not the document is new, or whether it has changes or not.
+ * Takes an array of documents, determines the changes in each one, and inserts new
+ * documents or updates existing documents in the database accordingly.
*
* `bulkSave` uses `bulkWrite` under the hood, so it's mostly useful when dealing with many documents (10K+)
*
+ * `bulkSave()` throws errors under the following conditions:
+ *
+ * - one of the provided documents fails validation. In this case, `bulkSave()` does not send a `bulkWrite()`, and throws the first validation error.
+ * - `bulkWrite()` fails (for example, due to being unable to connect to MongoDB or due to duplicate key error)
+ * - `bulkWrite()` did not insert or update all of the given documents, for example because of a document version mismatch (optimistic concurrency). In this case, `bulkSave()` throws a `MongooseBulkSaveIncompleteError`.
+ *
+ * Note that, if `bulkWrite()` reports write errors for individual documents, `bulkSave()` rethrows that error after handling the documents that were written successfully.
+ *
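+ * #### Example (illustrative sketch; `Character` is a placeholder model):
+ *
+ *     const docs = await Character.find();
+ *     for (const doc of docs) {
+ *       doc.name = doc.name.trim();
+ *     }
+ *     // Sends a single bulkWrite() with one updateOne op per changed document
+ *     const res = await Character.bulkSave(docs);
+ *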
* @param {Array} documents
* @param {Object} [options] options passed to the underlying `bulkWrite()`
* @param {Boolean} [options.timestamps] defaults to `null`, when set to false, mongoose will not add/update timestamps to the documents.
@@ -3380,7 +3427,7 @@ Model.bulkWrite = async function bulkWrite(ops, options) {
* @param {String|number} [options.w=1] The [write concern](https://www.mongodb.com/docs/manual/reference/write-concern/). See [`Query#w()`](https://mongoosejs.com/docs/api/query.html#Query.prototype.w()) for more information.
* @param {number} [options.wtimeout=null] The [write concern timeout](https://www.mongodb.com/docs/manual/reference/write-concern/#wtimeout).
* @param {Boolean} [options.j=true] If false, disable [journal acknowledgement](https://www.mongodb.com/docs/manual/reference/write-concern/#j-option)
- *
+ * @return {BulkWriteResult} the return value from `bulkWrite()`
*/
Model.bulkSave = async function bulkSave(documents, options) {
options = options || {};
@@ -3408,18 +3455,35 @@ Model.bulkSave = async function bulkSave(documents, options) {
(err) => ({ bulkWriteResult: null, bulkWriteError: err })
);
- await Promise.all(
- documents.map(async(document) => {
- const documentError = bulkWriteError && bulkWriteError.writeErrors.find(writeError => {
- const writeErrorDocumentId = writeError.err.op._id || writeError.err.op.q._id;
- return writeErrorDocumentId.toString() === document._doc._id.toString();
- });
+ // If not a MongoBulkWriteError, treat this as all documents failed to save.
+ if (bulkWriteError != null && !(bulkWriteError instanceof MongoBulkWriteError)) {
+ throw bulkWriteError;
+ }
- if (documentError == null) {
- await handleSuccessfulWrite(document);
- }
- })
- );
+ const matchedCount = bulkWriteResult?.matchedCount ?? 0;
+ const insertedCount = bulkWriteResult?.insertedCount ?? 0;
+ if (writeOperations.length > 0 && matchedCount + insertedCount < writeOperations.length && !bulkWriteError) {
+ throw new MongooseBulkSaveIncompleteError(
+ this.modelName,
+ documents,
+ bulkWriteResult
+ );
+ }
+
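+  // Only documents without a corresponding write error are treated as successfully
+  // written and get their post-save state updated below.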
+ const successfulDocuments = [];
+ for (let i = 0; i < documents.length; i++) {
+ const document = documents[i];
+ const documentError = bulkWriteError && bulkWriteError.writeErrors.find(writeError => {
+ const writeErrorDocumentId = writeError.err.op._id || writeError.err.op.q._id;
+ return writeErrorDocumentId.toString() === document._doc._id.toString();
+ });
+
+ if (documentError == null) {
+ successfulDocuments.push(document);
+ }
+ }
+
+ await Promise.all(successfulDocuments.map(document => handleSuccessfulWrite(document)));
if (bulkWriteError && bulkWriteError.writeErrors && bulkWriteError.writeErrors.length) {
throw bulkWriteError;
@@ -3467,6 +3531,9 @@ function handleSuccessfulWrite(document) {
*/
Model.applyDefaults = function applyDefaults(doc) {
+ if (doc == null) {
+ return doc;
+ }
if (doc.$__ != null) {
applyDefaultsHelper(doc, doc.$__.fields, doc.$__.exclude);
@@ -3482,6 +3549,75 @@ Model.applyDefaults = function applyDefaults(doc) {
return doc;
};
+/**
+ * Apply this model's virtuals to a given POJO. Virtuals execute with the POJO as the context `this`.
+ *
+ * #### Example:
+ *
+ * const userSchema = new Schema({ name: String });
+ * userSchema.virtual('upper').get(function() { return this.name.toUpperCase(); });
+ * const User = mongoose.model('User', userSchema);
+ *
+ * const obj = { name: 'John' };
+ * User.applyVirtuals(obj);
+ * obj.name; // 'John'
+ * obj.upper; // 'JOHN', Mongoose applied the return value of the virtual to the given object
+ *
+ * @param {Object} obj object or document to apply virtuals on
+ * @param {Array} [virtualsToApply] optional whitelist of virtuals to apply
+ * @returns {Object} obj
+ * @api public
+ */
+
+Model.applyVirtuals = function applyVirtuals(obj, virtualsToApply) {
+ if (obj == null) {
+ return obj;
+ }
+ // Nothing to do if this is already a hydrated document - it should already have virtuals
+ if (obj.$__ != null) {
+ return obj;
+ }
+
+ applyVirtualsHelper(this.schema, obj, virtualsToApply);
+
+ return obj;
+};
+
+/**
+ * Apply this model's timestamps to a given POJO, including subdocument timestamps
+ *
+ * #### Example:
+ *
+ * const userSchema = new Schema({ name: String }, { timestamps: true });
+ * const User = mongoose.model('User', userSchema);
+ *
+ * const obj = { name: 'John' };
+ * User.applyTimestamps(obj);
+ * obj.createdAt; // 2024-06-01T18:00:00.000Z
+ * obj.updatedAt; // 2024-06-01T18:00:00.000Z
+ *
+ * @param {Object} obj object or document to apply timestamps on
+ * @param {Object} [options]
+ * @param {Boolean} [options.isUpdate=false] if true, treat this as an update: just set updatedAt, skip setting createdAt. If false, set both createdAt and updatedAt
+ * @param {Function} [options.currentTime] if set, Mongoose will call this function to get the current time.
+ * @returns {Object} obj
+ * @api public
+ */
+
+Model.applyTimestamps = function applyTimestamps(obj, options) {
+ if (obj == null) {
+ return obj;
+ }
+ // Nothing to do if this is already a hydrated document - it should already have timestamps
+ if (obj.$__ != null) {
+ return obj;
+ }
+
+ applyTimestampsHelper(this.schema, obj, options);
+
+ return obj;
+};
+
/**
* Cast the given POJO to the model's schema
*
@@ -3713,7 +3849,7 @@ Model.hydrate = function(obj, projection, options) {
* const res = await Person.updateMany({ name: /Stark$/ }, { isDeleted: true });
* res.matchedCount; // Number of documents matched
* res.modifiedCount; // Number of documents modified
- * res.acknowledged; // Boolean indicating everything went smoothly.
+ * res.acknowledged; // Boolean indicating the MongoDB server received the operation. This may be false if Mongoose did not send an update to the server because the update was empty.
* res.upsertedId; // null or an id containing a document that had to be upserted.
* res.upsertedCount; // Number indicating how many documents had to be upserted. Will either be 0 or 1.
*
@@ -3753,7 +3889,7 @@ Model.updateMany = function updateMany(conditions, doc, options) {
* const res = await Person.updateOne({ name: 'Jean-Luc Picard' }, { ship: 'USS Enterprise' });
* res.matchedCount; // Number of documents matched
* res.modifiedCount; // Number of documents modified
- * res.acknowledged; // Boolean indicating everything went smoothly.
+ * res.acknowledged; // Boolean indicating the MongoDB server received the operation. This may be false if Mongoose did not send an update to the server because the update was empty.
* res.upsertedId; // null or an id containing a document that had to be upserted.
* res.upsertedCount; // Number indicating how many documents had to be upserted. Will either be 0 or 1.
*
@@ -3791,7 +3927,7 @@ Model.updateOne = function updateOne(conditions, doc, options) {
* const res = await Person.replaceOne({ _id: 24601 }, { name: 'Jean Valjean' });
* res.matchedCount; // Number of documents matched
* res.modifiedCount; // Number of documents modified
- * res.acknowledged; // Boolean indicating everything went smoothly.
+ * res.acknowledged; // Boolean indicating the MongoDB server received the operation.
* res.upsertedId; // null or an id containing a document that had to be upserted.
* res.upsertedCount; // Number indicating how many documents had to be upserted. Will either be 0 or 1.
*
diff --git a/lib/mongoose.js b/lib/mongoose.js
index f314e4c399a..2a03b638209 100644
--- a/lib/mongoose.js
+++ b/lib/mongoose.js
@@ -661,6 +661,8 @@ Mongoose.prototype._model = function(name, schema, collection, options) {
utils.toCollectionName(name, _mongoose.pluralize());
}
+ applyEmbeddedDiscriminators(schema);
+
const connection = options.connection || _mongoose.connection;
model = _mongoose.Model.compile(model || name, schema, collection, connection, _mongoose);
// Errors handled internally, so safe to ignore error
@@ -678,8 +680,6 @@ Mongoose.prototype._model = function(name, schema, collection, options) {
}
}
- applyEmbeddedDiscriminators(schema);
-
return model;
};
diff --git a/lib/options.js b/lib/options.js
index 3bae58e1200..bbdcda8b97e 100644
--- a/lib/options.js
+++ b/lib/options.js
@@ -12,5 +12,6 @@ exports.internalToObjectOptions = {
depopulate: true,
flattenDecimals: false,
useProjection: false,
- versionKey: true
+ versionKey: true,
+ flattenObjectIds: false
};
diff --git a/lib/options/saveOptions.js b/lib/options/saveOptions.js
index 66c1608b1d5..286987ee1e4 100644
--- a/lib/options/saveOptions.js
+++ b/lib/options/saveOptions.js
@@ -11,4 +11,6 @@ class SaveOptions {
}
}
+SaveOptions.prototype.__subdocs = null;
+
module.exports = SaveOptions;
diff --git a/lib/plugins/saveSubdocs.js b/lib/plugins/saveSubdocs.js
index 4b47bd73320..6dc0cc9e2e7 100644
--- a/lib/plugins/saveSubdocs.js
+++ b/lib/plugins/saveSubdocs.js
@@ -15,7 +15,7 @@ module.exports = function saveSubdocs(schema) {
}
const _this = this;
- const subdocs = this.$getAllSubdocs();
+ const subdocs = this.$getAllSubdocs({ useCache: true });
if (!subdocs.length) {
next();
@@ -27,6 +27,8 @@ module.exports = function saveSubdocs(schema) {
cb(err);
});
}, function(error) {
+ // Bust subdocs cache because subdoc pre hooks can add new subdocuments
+ _this.$__.saveOptions.__subdocs = null;
if (error) {
return _this.$__schema.s.hooks.execPost('save:error', _this, [_this], { error: error }, function(error) {
next(error);
@@ -64,7 +66,7 @@ module.exports = function saveSubdocs(schema) {
}
const _this = this;
- const subdocs = this.$getAllSubdocs();
+ const subdocs = this.$getAllSubdocs({ useCache: true });
if (!subdocs.length) {
return;
diff --git a/lib/plugins/trackTransaction.js b/lib/plugins/trackTransaction.js
index af5c7c84da4..857caac6044 100644
--- a/lib/plugins/trackTransaction.js
+++ b/lib/plugins/trackTransaction.js
@@ -27,13 +27,6 @@ module.exports = function trackTransaction(schema) {
initialState.atomics = _getAtomics(this);
session[sessionNewDocuments].set(this, initialState);
- } else {
- const state = session[sessionNewDocuments].get(this);
-
- for (const path of Object.keys(this.$__.activePaths.getStatePaths('modify'))) {
- state.modifiedPaths.add(path);
- }
- state.atomics = _getAtomics(this, state.atomics);
}
});
};
diff --git a/lib/query.js b/lib/query.js
index dec846a2dcb..a45d32fb91d 100644
--- a/lib/query.js
+++ b/lib/query.js
@@ -22,7 +22,6 @@ const castUpdate = require('./helpers/query/castUpdate');
const clone = require('./helpers/clone');
const getDiscriminatorByValue = require('./helpers/discriminator/getDiscriminatorByValue');
const helpers = require('./queryHelpers');
-const immediate = require('./helpers/immediate');
const internalToObjectOptions = require('./options').internalToObjectOptions;
const isExclusive = require('./helpers/projection/isExclusive');
const isInclusive = require('./helpers/projection/isInclusive');
@@ -1142,6 +1141,38 @@ Query.prototype.select = function select() {
throw new TypeError('Invalid select() argument. Must be string or object.');
};
+/**
+ * Enable or disable schema level projections for this query. Enabled by default.
+ * Set to `false` to include fields with `select: false` in the query result by default.
+ *
+ * #### Example:
+ *
+ * const userSchema = new Schema({
+ * email: { type: String, required: true },
+ * passwordHash: { type: String, select: false, required: true }
+ * });
+ * const UserModel = mongoose.model('User', userSchema);
+ *
+ * const doc = await UserModel.findOne().orFail().schemaLevelProjections(false);
+ *
+ * // Contains password hash, because `schemaLevelProjections()` overrides `select: false`
+ * doc.passwordHash;
+ *
+ * @method schemaLevelProjections
+ * @memberOf Query
+ * @instance
+ * @param {Boolean} value
+ * @return {Query} this
+ * @see SchemaTypeOptions https://mongoosejs.com/docs/schematypes.html#all-schema-types
+ * @api public
+ */
+
+Query.prototype.schemaLevelProjections = function schemaLevelProjections(value) {
+ this._mongooseOptions.schemaLevelProjections = value;
+
+ return this;
+};
+
/**
* Sets this query's `sanitizeProjection` option. If set, `sanitizeProjection` does
* two things:
@@ -1594,6 +1625,7 @@ Query.prototype.getOptions = function() {
* - [writeConcern](https://www.mongodb.com/docs/manual/reference/method/db.collection.update/)
* - [timestamps](https://mongoosejs.com/docs/guide.html#timestamps): If `timestamps` is set in the schema, set this option to `false` to skip timestamps for that particular update. Has no effect if `timestamps` is not enabled in the schema options.
* - overwriteDiscriminatorKey: allow setting the discriminator key in the update. Will use the correct discriminator schema if the update changes the discriminator key.
+ * - overwriteImmutable: allow overwriting properties that are set to `immutable` in the schema. Defaults to false.
*
* The following options are only for `find()`, `findOne()`, `findById()`, `findOneAndUpdate()`, `findOneAndReplace()`, `findOneAndDelete()`, and `findByIdAndUpdate()`:
*
@@ -1665,6 +1697,10 @@ Query.prototype.setOptions = function(options, overwrite) {
this._mongooseOptions.overwriteDiscriminatorKey = options.overwriteDiscriminatorKey;
delete options.overwriteDiscriminatorKey;
}
+ if ('overwriteImmutable' in options) {
+ this._mongooseOptions.overwriteImmutable = options.overwriteImmutable;
+ delete options.overwriteImmutable;
+ }
if ('sanitizeProjection' in options) {
if (options.sanitizeProjection && !this._mongooseOptions.sanitizeProjection) {
sanitizeProjection(this._fields);
@@ -1689,6 +1725,10 @@ Query.prototype.setOptions = function(options, overwrite) {
this._mongooseOptions.translateAliases = options.translateAliases;
delete options.translateAliases;
}
+ if ('schemaLevelProjections' in options) {
+ this._mongooseOptions.schemaLevelProjections = options.schemaLevelProjections;
+ delete options.schemaLevelProjections;
+ }
if (options.lean == null && this.schema && 'lean' in this.schema.options) {
this._mongooseOptions.lean = this.schema.options.lean;
@@ -2207,6 +2247,9 @@ Query.prototype.error = function error(err) {
*/
Query.prototype._unsetCastError = function _unsetCastError() {
+ if (this._error == null) {
+ return;
+ }
if (this._error != null && !(this._error instanceof CastError)) {
return;
}
@@ -2222,6 +2265,7 @@ Query.prototype._unsetCastError = function _unsetCastError() {
* - `strict`: controls how Mongoose handles keys that aren't in the schema for updates. This option is `true` by default, which means Mongoose will silently strip any paths in the update that aren't in the schema. See the [`strict` mode docs](https://mongoosejs.com/docs/guide.html#strict) for more information.
* - `strictQuery`: controls how Mongoose handles keys that aren't in the schema for the query `filter`. This option is `false` by default, which means Mongoose will allow `Model.find({ foo: 'bar' })` even if `foo` is not in the schema. See the [`strictQuery` docs](https://mongoosejs.com/docs/guide.html#strictQuery) for more information.
* - `nearSphere`: use `$nearSphere` instead of `near()`. See the [`Query.prototype.nearSphere()` docs](https://mongoosejs.com/docs/api/query.html#Query.prototype.nearSphere())
+ * - `schemaLevelProjections`: if `false`, Mongoose will not apply schema-level `select: false` or `select: true` for this query
*
* Mongoose maintains a separate object for internal options because
* Mongoose sends `Query.prototype.options` to the MongoDB server, and the
@@ -2249,9 +2293,9 @@ Query.prototype.mongooseOptions = function(v) {
Query.prototype._castConditions = function() {
let sanitizeFilterOpt = undefined;
- if (this.model != null && utils.hasUserDefinedProperty(this.model.db.options, 'sanitizeFilter')) {
+ if (this.model?.db.options?.sanitizeFilter != null) {
sanitizeFilterOpt = this.model.db.options.sanitizeFilter;
- } else if (this.model != null && utils.hasUserDefinedProperty(this.model.base.options, 'sanitizeFilter')) {
+ } else if (this.model?.base.options?.sanitizeFilter != null) {
sanitizeFilterOpt = this.model.base.options.sanitizeFilter;
} else {
sanitizeFilterOpt = this._mongooseOptions.sanitizeFilter;
@@ -2494,13 +2538,12 @@ Query.prototype.collation = function(value) {
* @api private
*/
-Query.prototype._completeOne = function(doc, res, callback) {
+Query.prototype._completeOne = function(doc, res, projection, callback) {
if (!doc && !this.options.includeResultMetadata) {
return callback(null, null);
}
const model = this.model;
- const projection = clone(this._fields);
const userProvidedFields = this._userProvidedFields || {};
// `populate`, `lean`
const mongooseOptions = this._mongooseOptions;
@@ -2601,7 +2644,7 @@ Query.prototype._findOne = async function _findOne() {
// don't pass in the conditions because we already merged them in
const doc = await this.mongooseCollection.findOne(this._conditions, options);
return new Promise((resolve, reject) => {
- this._completeOne(doc, null, (err, res) => {
+ this._completeOne(doc, null, options.projection, (err, res) => {
if (err) {
return reject(err);
}
@@ -3196,7 +3239,7 @@ function completeOne(model, doc, res, options, fields, userProvidedFields, pop,
function _init(err, casted) {
if (err) {
- return immediate(() => callback(err));
+ return callback(err);
}
@@ -3209,12 +3252,12 @@ function completeOne(model, doc, res, options, fields, userProvidedFields, pop,
} else {
res.value = null;
}
- return immediate(() => callback(null, res));
+ return callback(null, res);
}
if (options.session != null) {
casted.$session(options.session);
}
- immediate(() => callback(null, casted));
+ callback(null, casted);
}
}
@@ -3281,6 +3324,7 @@ function prepareDiscriminatorCriteria(query) {
* @param {Boolean} [options.returnOriginal=null] An alias for the `new` option. `returnOriginal: false` is equivalent to `new: true`.
* @param {Boolean} [options.translateAliases=null] If set to `true`, translates any schema-defined aliases in `filter`, `projection`, `update`, and `distinct`. Throws an error if there are any conflicts where both alias and raw property are defined on the same object.
* @param {Boolean} [options.overwriteDiscriminatorKey=false] Mongoose removes discriminator key updates from `update` by default, set `overwriteDiscriminatorKey` to `true` to allow updating the discriminator key
+ * @param {Boolean} [options.overwriteImmutable=false] Mongoose removes updated immutable properties from `update` by default (excluding $setOnInsert). Set `overwriteImmutable` to `true` to allow updating immutable properties using other update operators.
* @see Tutorial https://mongoosejs.com/docs/tutorials/findoneandupdate.html
* @see findAndModify command https://www.mongodb.com/docs/manual/reference/command/findAndModify/
* @see ModifyResult https://mongodb.github.io/node-mongodb-native/4.9/interfaces/ModifyResult.html
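A hedged sketch of the `overwriteImmutable` option documented above (schema and values are illustrative): by default Mongoose strips the immutable path from the update, and opting in lets the update through.

const orderSchema = new Schema({
  createdBy: { type: String, immutable: true },
  status: String
});
const Order = mongoose.model('Order', orderSchema);
const order = await Order.create({ createdBy: 'alice', status: 'pending' });

// Default: the `createdBy` update is stripped, only `status` changes.
await Order.findOneAndUpdate({ _id: order._id }, { createdBy: 'bob', status: 'shipped' });

// With overwriteImmutable, `createdBy` is updated as well.
await Order.findOneAndUpdate(
  { _id: order._id },
  { createdBy: 'bob', status: 'shipped' },
  { overwriteImmutable: true, new: true }
);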
@@ -3422,7 +3466,7 @@ Query.prototype._findOneAndUpdate = async function _findOneAndUpdate() {
const doc = !options.includeResultMetadata ? res : res.value;
return new Promise((resolve, reject) => {
- this._completeOne(doc, res, (err, res) => {
+ this._completeOne(doc, res, options.projection, (err, res) => {
if (err) {
return reject(err);
}
@@ -3518,7 +3562,7 @@ Query.prototype._findOneAndDelete = async function _findOneAndDelete() {
const doc = !includeResultMetadata ? res : res.value;
return new Promise((resolve, reject) => {
- this._completeOne(doc, res, (err, res) => {
+ this._completeOne(doc, res, options.projection, (err, res) => {
if (err) {
return reject(err);
}
@@ -3672,7 +3716,7 @@ Query.prototype._findOneAndReplace = async function _findOneAndReplace() {
const doc = !includeResultMetadata ? res : res.value;
return new Promise((resolve, reject) => {
- this._completeOne(doc, res, (err, res) => {
+ this._completeOne(doc, res, options.projection, (err, res) => {
if (err) {
return reject(err);
}
@@ -3979,6 +4023,7 @@ Query.prototype._replaceOne = async function _replaceOne() {
* @param {Boolean} [options.timestamps=null] If set to `false` and [schema-level timestamps](https://mongoosejs.com/docs/guide.html#timestamps) are enabled, skip timestamps for this update. Does nothing if schema-level timestamps are not set.
* @param {Boolean} [options.translateAliases=null] If set to `true`, translates any schema-defined aliases in `filter`, `projection`, `update`, and `distinct`. Throws an error if there are any conflicts where both alias and raw property are defined on the same object.
* @param {Boolean} [options.overwriteDiscriminatorKey=false] Mongoose removes discriminator key updates from `update` by default, set `overwriteDiscriminatorKey` to `true` to allow updating the discriminator key
+ * @param {Boolean} [options.overwriteImmutable=false] Mongoose removes updated immutable properties from `update` by default (excluding $setOnInsert). Set `overwriteImmutable` to `true` to allow updating immutable properties using other update operators.
* @param {Function} [callback] params are (error, writeOpResult)
* @return {Query} this
* @see Model.update https://mongoosejs.com/docs/api/model.html#Model.update()
@@ -4049,6 +4094,7 @@ Query.prototype.updateMany = function(conditions, doc, options, callback) {
* @param {Boolean} [options.timestamps=null] If set to `false` and [schema-level timestamps](https://mongoosejs.com/docs/guide.html#timestamps) are enabled, skip timestamps for this update. Note that this allows you to overwrite timestamps. Does nothing if schema-level timestamps are not set.
* @param {Boolean} [options.translateAliases=null] If set to `true`, translates any schema-defined aliases in `filter`, `projection`, `update`, and `distinct`. Throws an error if there are any conflicts where both alias and raw property are defined on the same object.
* @param {Boolean} [options.overwriteDiscriminatorKey=false] Mongoose removes discriminator key updates from `update` by default, set `overwriteDiscriminatorKey` to `true` to allow updating the discriminator key
+ * @param {Boolean} [options.overwriteImmutable=false] Mongoose removes updated immutable properties from `update` by default (excluding $setOnInsert). Set `overwriteImmutable` to `true` to allow updating immutable properties using other update operators.
* @param {Function} [callback] params are (error, writeOpResult)
* @return {Query} this
* @see Model.update https://mongoosejs.com/docs/api/model.html#Model.update()
@@ -4387,10 +4433,10 @@ Query.prototype.exec = async function exec(op) {
str = str.slice(0, 60) + '...';
}
const err = new MongooseError('Query was already executed: ' + str);
- err.originalStack = this._executionStack.stack;
+ err.originalStack = this._executionStack;
throw err;
} else {
- this._executionStack = new Error();
+ this._executionStack = new Error().stack;
}
let skipWrappedFunction = null;
@@ -4654,23 +4700,12 @@ Query.prototype._castUpdate = function _castUpdate(obj) {
upsert = this.options.upsert;
}
- const filter = this._conditions;
- if (schema != null &&
- utils.hasUserDefinedProperty(filter, schema.options.discriminatorKey) &&
- typeof filter[schema.options.discriminatorKey] !== 'object' &&
- schema.discriminators != null) {
- const discriminatorValue = filter[schema.options.discriminatorKey];
- const byValue = getDiscriminatorByValue(this.model.discriminators, discriminatorValue);
- schema = schema.discriminators[discriminatorValue] ||
- (byValue && byValue.schema) ||
- schema;
- }
-
return castUpdate(schema, obj, {
strict: this._mongooseOptions.strict,
upsert: upsert,
arrayFilters: this.options.arrayFilters,
- overwriteDiscriminatorKey: this._mongooseOptions.overwriteDiscriminatorKey
+ overwriteDiscriminatorKey: this._mongooseOptions.overwriteDiscriminatorKey,
+ overwriteImmutable: this._mongooseOptions.overwriteImmutable
}, this, this._conditions);
};
@@ -4863,6 +4898,9 @@ Query.prototype.cast = function(model, obj) {
opts.strictQuery = this.options.strictQuery;
}
}
+ if ('sanitizeFilter' in this._mongooseOptions) {
+ opts.sanitizeFilter = this._mongooseOptions.sanitizeFilter;
+ }
try {
return cast(model.schema, obj, opts, this);
@@ -4946,7 +4984,11 @@ Query.prototype._applyPaths = function applyPaths() {
sanitizeProjection = this._mongooseOptions.sanitizeProjection;
}
- helpers.applyPaths(this._fields, this.model.schema, sanitizeProjection);
+ const schemaLevelProjections = this._mongooseOptions.schemaLevelProjections ?? true;
+
+ if (schemaLevelProjections) {
+ helpers.applyPaths(this._fields, this.model.schema, sanitizeProjection);
+ }
let _selectPopulatedPaths = true;
diff --git a/lib/schema.js b/lib/schema.js
index bb3480088c6..7caaac75920 100644
--- a/lib/schema.js
+++ b/lib/schema.js
@@ -1126,6 +1126,13 @@ Schema.prototype.path = function(path, obj) {
this.paths[mapPath] = schemaType.$__schemaType;
this.mapPaths.push(this.paths[mapPath]);
+ if (schemaType.$__schemaType.$isSingleNested) {
+ this.childSchemas.push({
+ schema: schemaType.$__schemaType.schema,
+ model: schemaType.$__schemaType.caster,
+ path: path
+ });
+ }
}
if (schemaType.$isSingleNested) {
@@ -1154,7 +1161,8 @@ Schema.prototype.path = function(path, obj) {
schemaType.caster.base = this.base;
this.childSchemas.push({
schema: schemaType.schema,
- model: schemaType.caster
+ model: schemaType.caster,
+ path: path
});
} else if (schemaType.$isMongooseDocumentArray) {
Object.defineProperty(schemaType.schema, 'base', {
@@ -1167,7 +1175,8 @@ Schema.prototype.path = function(path, obj) {
schemaType.casterConstructor.base = this.base;
this.childSchemas.push({
schema: schemaType.schema,
- model: schemaType.casterConstructor
+ model: schemaType.casterConstructor,
+ path: path
});
}
@@ -1235,7 +1244,9 @@ function gatherChildSchemas(schema) {
for (const path of Object.keys(schema.paths)) {
const schematype = schema.paths[path];
if (schematype.$isMongooseDocumentArray || schematype.$isSingleNested) {
- childSchemas.push({ schema: schematype.schema, model: schematype.caster });
+ childSchemas.push({ schema: schematype.schema, model: schematype.caster, path: path });
+ } else if (schematype.$isSchemaMap && schematype.$__schemaType.$isSingleNested) {
+ childSchemas.push({ schema: schematype.$__schemaType.schema, model: schematype.$__schemaType.caster, path: path });
}
}
@@ -2304,6 +2315,7 @@ Schema.prototype.indexes = function() {
* @param {Boolean} [options.count=false] Only works with populate virtuals. If [truthy](https://masteringjs.io/tutorials/fundamentals/truthy), this populate virtual will contain the number of documents rather than the documents themselves when you `populate()`.
* @param {Function|null} [options.get=null] Adds a [getter](https://mongoosejs.com/docs/tutorials/getters-setters.html) to this virtual to transform the populated doc.
* @param {Object|Function} [options.match=null] Apply a default [`match` option to populate](https://mongoosejs.com/docs/populate.html#match), adding an additional filter to the populate query.
+ * @param {Boolean} [options.applyToArray=false] If true and the given `name` is a direct child of an array, apply the virtual to the array rather than the elements.
* @return {VirtualType}
*/
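A short sketch of the `applyToArray` option described above, mirroring the gh-2306 tests added below: the virtual attaches to the array itself rather than to each element.

const schema = new Schema({ nums: [Number] });
schema.virtual('nums.first', { applyToArray: true }).get(function() {
  return this[0]; // `this` is the Mongoose array, not an individual element
});

const Example = mongoose.model('Example', schema);
const doc = new Example({ nums: [1, 2, 3] });
doc.nums.first; // 1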
@@ -2416,6 +2428,15 @@ Schema.prototype.virtual = function(name, options) {
return mem[part];
}, this.tree);
+ if (options && options.applyToArray && parts.length > 1) {
+ const path = this.path(parts.slice(0, -1).join('.'));
+ if (path && path.$isMongooseArray) {
+ return path.virtual(parts[parts.length - 1], options);
+ } else {
+ throw new MongooseError(`Path "${path}" is not an array`);
+ }
+ }
+
return virtuals[name];
};
diff --git a/lib/schema/array.js b/lib/schema/array.js
index 00774ee3147..e424731e4d6 100644
--- a/lib/schema/array.js
+++ b/lib/schema/array.js
@@ -11,9 +11,12 @@ const SchemaArrayOptions = require('../options/schemaArrayOptions');
const SchemaType = require('../schemaType');
const CastError = SchemaType.CastError;
const Mixed = require('./mixed');
+const VirtualOptions = require('../options/virtualOptions');
+const VirtualType = require('../virtualType');
const arrayDepth = require('../helpers/arrayDepth');
const cast = require('../cast');
const clone = require('../helpers/clone');
+const getConstructorName = require('../helpers/getConstructorName');
const isOperator = require('../helpers/query/isOperator');
const util = require('util');
const utils = require('../utils');
@@ -217,6 +220,12 @@ SchemaArray._checkRequired = SchemaType.prototype.checkRequired;
SchemaArray.checkRequired = SchemaType.checkRequired;
+/*!
+ * Virtuals defined on this array itself.
+ */
+
+SchemaArray.prototype.virtuals = null;
+
/**
* Check if the given value satisfies the `required` validator.
*
@@ -575,6 +584,32 @@ SchemaArray.prototype.castForQuery = function($conditional, val, context) {
}
};
+/**
+ * Add a virtual to this array itself, as opposed to its individual elements.
+ *
+ * @param {String} name
+ * @param {Object} [options]
+ * @api private
+ */
+
+SchemaArray.prototype.virtual = function virtual(name, options) {
+ if (name instanceof VirtualType || getConstructorName(name) === 'VirtualType') {
+ return this.virtual(name.path, name.options);
+ }
+ options = new VirtualOptions(options);
+
+ if (utils.hasUserDefinedProperty(options, ['ref', 'refPath'])) {
+ throw new MongooseError('Cannot set populate virtual as a property of an array');
+ }
+
+ const virtual = new VirtualType(options, name);
+ if (this.virtuals === null) {
+ this.virtuals = {};
+ }
+ this.virtuals[name] = virtual;
+ return virtual;
+};
+
function cast$all(val, context) {
if (!Array.isArray(val)) {
val = [val];
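For comparison, the gh-2306 tests below also exercise this array-level `virtual()` directly through `Schema.prototype.path()` (illustrative schema):

const schema = new Schema({ books: [{ title: String }] });
// Attach a getter to the array itself; inside the getter, `this` is the array.
schema.path('books').virtual('last').get(function() {
  return this[this.length - 1];
});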
diff --git a/lib/schema/buffer.js b/lib/schema/buffer.js
index 6444ebd8835..e5cec2e0158 100644
--- a/lib/schema/buffer.js
+++ b/lib/schema/buffer.js
@@ -219,6 +219,14 @@ SchemaBuffer.prototype.cast = function(value, doc, init) {
return ret;
}
+ if (utils.isPOJO(value) && (value.$binary instanceof Binary || typeof value.$binary === 'string')) {
+ const buf = this.cast(Buffer.from(value.$binary, 'base64'));
+ if (value.$type != null) {
+ buf._subtype = value.$type;
+ return buf;
+ }
+ }
+
throw new CastError('Buffer', value, this.path, null, this);
};
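A sketch of the EJSON-style input the new branch accepts, mirroring the gh-14911 test further down: a plain object with a base64 `$binary` string and a `$type` subtype now casts to a Buffer instead of throwing a CastError.

const fileSchema = new Schema({ content: Buffer });
const File = mongoose.model('File', fileSchema);

const doc = new File({
  content: { $binary: Buffer.from('hello', 'utf8').toString('base64'), $type: '00' }
});
Buffer.isBuffer(doc.content); // true
doc.content.toString('utf8'); // 'hello'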
diff --git a/lib/schema/documentArray.js b/lib/schema/documentArray.js
index aa0c0d7984a..9a7a5d3181d 100644
--- a/lib/schema/documentArray.js
+++ b/lib/schema/documentArray.js
@@ -429,7 +429,7 @@ SchemaDocumentArray.prototype.cast = function(value, doc, init, prev, options) {
// We need to create a new array, otherwise change tracking will
// update the old doc (gh-4449)
if (!options.skipDocumentArrayCast || utils.isMongooseDocumentArray(value)) {
- value = new MongooseDocumentArray(value, path, doc);
+ value = new MongooseDocumentArray(value, path, doc, this);
}
if (prev != null) {
diff --git a/lib/schema/uuid.js b/lib/schema/uuid.js
index aa72c42107f..1fbfc38654d 100644
--- a/lib/schema/uuid.js
+++ b/lib/schema/uuid.js
@@ -26,19 +26,6 @@ function hex2buffer(hex) {
return buff;
}
-/**
- * Helper function to convert the buffer input to a string
- * @param {Buffer} buf The buffer to convert to a hex-string
- * @returns {String} The buffer as a hex-string
- * @api private
- */
-
-function binary2hex(buf) {
- // use buffer built-in function to convert from buffer to hex-string
- const hex = buf != null && buf.toString('hex');
- return hex;
-}
-
/**
* Convert a String to Binary
* @param {String} uuidStr The value to process
@@ -67,7 +54,7 @@ function binaryToString(uuidBin) {
// i(hasezoey) dont quite know why, but "uuidBin" may sometimes also be the already processed string
let hex;
if (typeof uuidBin !== 'string' && uuidBin != null) {
- hex = binary2hex(uuidBin);
+ hex = uuidBin.toString('hex');
const uuidStr = hex.substring(0, 8) + '-' + hex.substring(8, 8 + 4) + '-' + hex.substring(12, 12 + 4) + '-' + hex.substring(16, 16 + 4) + '-' + hex.substring(20, 20 + 12);
return uuidStr;
}
@@ -90,7 +77,15 @@ function SchemaUUID(key, options) {
if (value != null && value.$__ != null) {
return value;
}
- return binaryToString(value);
+ if (Buffer.isBuffer(value)) {
+ return binaryToString(value);
+ } else if (value instanceof Binary) {
+ return binaryToString(value.buffer);
+ } else if (utils.isPOJO(value) && value.type === 'Buffer' && Array.isArray(value.data)) {
+ // Cloned buffers look like `{ type: 'Buffer', data: [5, 224, ...] }`
+ return binaryToString(Buffer.from(value.data));
+ }
+ return value;
});
}
diff --git a/lib/schemaType.js b/lib/schemaType.js
index f95ecbb3226..e5aa476468f 100644
--- a/lib/schemaType.js
+++ b/lib/schemaType.js
@@ -1724,6 +1724,25 @@ SchemaType.prototype.clone = function() {
return schematype;
};
+/**
+ * Returns the embedded schema type, if any. For arrays, document arrays, and maps, `getEmbeddedSchemaType()`
+ * returns the schema type of the array's elements (or map's elements). For other types, `getEmbeddedSchemaType()`
+ * returns `undefined`.
+ *
+ * #### Example:
+ *
+ * const schema = new Schema({ name: String, tags: [String] });
+ * schema.path('name').getEmbeddedSchemaType(); // undefined
+ * schema.path('tags').getEmbeddedSchemaType(); // SchemaString { path: 'tags', ... }
+ *
+ * @returns {SchemaType} embedded schematype
+ * @api public
+ */
+
+SchemaType.prototype.getEmbeddedSchemaType = function getEmbeddedSchemaType() {
+ return this.$embeddedSchemaType;
+};
+
/*!
* Module exports.
*/
diff --git a/lib/types/array/index.js b/lib/types/array/index.js
index 4a8c98823a7..1f6e6a54d88 100644
--- a/lib/types/array/index.js
+++ b/lib/types/array/index.js
@@ -90,6 +90,9 @@ function MongooseArray(values, path, doc, schematype) {
if (mongooseArrayMethods.hasOwnProperty(prop)) {
return mongooseArrayMethods[prop];
}
+ if (schematype && schematype.virtuals && schematype.virtuals.hasOwnProperty(prop)) {
+ return schematype.virtuals[prop].applyGetters(undefined, target);
+ }
if (typeof prop === 'string' && numberRE.test(prop) && schematype?.$embeddedSchemaType != null) {
return schematype.$embeddedSchemaType.applyGetters(__array[prop], doc);
}
@@ -101,6 +104,8 @@ function MongooseArray(values, path, doc, schematype) {
mongooseArrayMethods.set.call(proxy, prop, value, false);
} else if (internals.hasOwnProperty(prop)) {
internals[prop] = value;
+ } else if (schematype && schematype.virtuals && schematype.virtuals.hasOwnProperty(prop)) {
+ schematype.virtuals[prop].applySetters(value, target);
} else {
__array[prop] = value;
}
diff --git a/lib/types/array/methods/index.js b/lib/types/array/methods/index.js
index cf31914bb7e..3322bbe56e8 100644
--- a/lib/types/array/methods/index.js
+++ b/lib/types/array/methods/index.js
@@ -410,6 +410,7 @@ const methods = {
addToSet() {
_checkManualPopulation(this, arguments);
+ _depopulateIfNecessary(this, arguments);
const values = [].map.call(arguments, this._mapCast, this);
const added = [];
@@ -691,6 +692,7 @@ const methods = {
}
_checkManualPopulation(this, values);
+ _depopulateIfNecessary(this, values);
values = [].map.call(values, this._mapCast, this);
let ret;
@@ -1009,6 +1011,30 @@ function _checkManualPopulation(arr, docs) {
}
}
+/*!
+ * If the entries in `docs` aren't all instances of the right model, depopulate `arr`
+ */
+
+function _depopulateIfNecessary(arr, docs) {
+ const ref = arr == null ?
+ null :
+ arr[arraySchemaSymbol] && arr[arraySchemaSymbol].caster && arr[arraySchemaSymbol].caster.options && arr[arraySchemaSymbol].caster.options.ref || null;
+ const parentDoc = arr[arrayParentSymbol];
+ const path = arr[arrayPathSymbol];
+ if (!ref || !parentDoc.populated(path)) {
+ return;
+ }
+ for (const doc of docs) {
+ if (doc == null) {
+ continue;
+ }
+ if (typeof doc !== 'object' || doc instanceof String || doc instanceof Number || doc instanceof Buffer || utils.isMongooseType(doc)) {
+ parentDoc.depopulate(path);
+ break;
+ }
+ }
+}
+
const returnVanillaArrayMethods = [
'filter',
'flat',
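The user-visible effect of `_depopulateIfNecessary`, mirroring the gh-1635 test further down (document and variable names are illustrative): pushing a raw ObjectId onto a populated array depopulates the array so its contents stay consistent.

const parent = await ParentModel.findById(parentId).populate('children');
parent.children.push(newChildId); // a raw ObjectId, not a populated document
// The whole array is depopulated, so every element is an ObjectId again.
parent.children.every(v => v instanceof mongoose.Types.ObjectId); // true
await parent.save();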
diff --git a/lib/types/documentArray/index.js b/lib/types/documentArray/index.js
index 4877f1a30ef..863d40ae62b 100644
--- a/lib/types/documentArray/index.js
+++ b/lib/types/documentArray/index.js
@@ -28,7 +28,7 @@ const numberRE = /^\d+$/;
* @see https://bit.ly/f6CnZU
*/
-function MongooseDocumentArray(values, path, doc) {
+function MongooseDocumentArray(values, path, doc, schematype) {
const __array = [];
const internals = {
@@ -84,6 +84,9 @@ function MongooseDocumentArray(values, path, doc) {
if (DocumentArrayMethods.hasOwnProperty(prop)) {
return DocumentArrayMethods[prop];
}
+ if (schematype && schematype.virtuals && schematype.virtuals.hasOwnProperty(prop)) {
+ return schematype.virtuals[prop].applyGetters(undefined, target);
+ }
if (ArrayMethods.hasOwnProperty(prop)) {
return ArrayMethods[prop];
}
@@ -95,6 +98,8 @@ function MongooseDocumentArray(values, path, doc) {
DocumentArrayMethods.set.call(proxy, prop, value, false);
} else if (internals.hasOwnProperty(prop)) {
internals[prop] = value;
+ } else if (schematype && schematype.virtuals && schematype.virtuals.hasOwnProperty(prop)) {
+ schematype.virtuals[prop].applySetters(value, target);
} else {
__array[prop] = value;
}
diff --git a/package.json b/package.json
index eb38af3ed03..10bffe5cf55 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "mongoose",
"description": "Mongoose MongoDB ODM",
- "version": "8.6.0",
+ "version": "8.8.3",
"author": "Guillermo Rauch ",
"keywords": [
"mongodb",
@@ -21,17 +21,17 @@
"dependencies": {
"bson": "^6.7.0",
"kareem": "2.6.3",
- "mongodb": "6.8.0",
+ "mongodb": "~6.10.0",
"mpath": "0.9.0",
"mquery": "5.0.0",
"ms": "2.1.3",
"sift": "17.1.3"
},
"devDependencies": {
- "@babel/core": "7.24.7",
- "@babel/preset-env": "7.25.3",
- "@typescript-eslint/eslint-plugin": "^6.21.0",
- "@typescript-eslint/parser": "^6.21.0",
+ "@babel/core": "7.26.0",
+ "@babel/preset-env": "7.26.0",
+ "@typescript-eslint/eslint-plugin": "^8.4.0",
+ "@typescript-eslint/parser": "^8.4.0",
"acquit": "1.3.0",
"acquit-ignore": "0.2.1",
"acquit-require": "0.1.1",
@@ -40,8 +40,8 @@
"babel-loader": "8.2.5",
"broken-link-checker": "^0.7.8",
"buffer": "^5.6.0",
- "cheerio": "1.0.0-rc.12",
- "crypto-browserify": "3.12.0",
+ "cheerio": "1.0.0",
+ "crypto-browserify": "3.12.1",
"dotenv": "16.4.5",
"dox": "1.0.0",
"eslint": "8.57.0",
@@ -52,22 +52,22 @@
"highlight.js": "11.10.0",
"lodash.isequal": "4.5.0",
"lodash.isequalwith": "4.4.0",
- "markdownlint-cli2": "^0.13.0",
- "marked": "14.0.0",
+ "markdownlint-cli2": "^0.14.0",
+ "marked": "14.1.3",
"mkdirp": "^3.0.1",
- "mocha": "10.7.0",
+ "mocha": "10.8.2",
"moment": "2.30.1",
- "mongodb-memory-server": "10.0.0",
+ "mongodb-memory-server": "10.1.2",
"ncp": "^2.0.0",
"nyc": "15.1.0",
"pug": "3.0.3",
"q": "1.5.1",
- "sinon": "18.0.0",
+ "sinon": "19.0.2",
"stream-browserify": "3.0.0",
- "tsd": "0.31.1",
- "typescript": "5.5.4",
- "uuid": "10.0.0",
- "webpack": "5.94.0"
+ "tsd": "0.31.2",
+ "typescript": "5.6.3",
+ "uuid": "11.0.2",
+ "webpack": "5.96.1"
},
"directories": {
"lib": "./lib/mongoose"
@@ -92,7 +92,7 @@
"lint": "eslint .",
"lint-js": "eslint . --ext .js --ext .cjs",
"lint-ts": "eslint . --ext .ts",
- "lint-md": "markdownlint-cli2 \"**/*.md\"",
+ "lint-md": "markdownlint-cli2 \"**/*.md\" \"#node_modules\" \"#benchmarks\"",
"build-browser": "(rm ./dist/* || true) && node ./scripts/build-browser.js",
"prepublishOnly": "npm run build-browser",
"release": "git pull && git push origin master --tags && npm publish",
diff --git a/scripts/tsc-diagnostics-check.js b/scripts/tsc-diagnostics-check.js
index a1c4408cb79..a13c884ee02 100644
--- a/scripts/tsc-diagnostics-check.js
+++ b/scripts/tsc-diagnostics-check.js
@@ -3,7 +3,7 @@
const fs = require('fs');
const stdin = fs.readFileSync(0).toString('utf8');
-const maxInstantiations = isNaN(process.argv[2]) ? 250000 : parseInt(process.argv[2], 10);
+const maxInstantiations = isNaN(process.argv[2]) ? 275000 : parseInt(process.argv[2], 10);
console.log(stdin);
diff --git a/test/docs/cast.test.js b/test/docs/cast.test.js
index b2b9b4aef78..b24a9db657d 100644
--- a/test/docs/cast.test.js
+++ b/test/docs/cast.test.js
@@ -137,6 +137,26 @@ describe('Cast Tutorial', function() {
// acquit:ignore:end
});
+ it('strictQuery removes casted empty objects', async function() {
+ mongoose.deleteModel('Character');
+ const schema = new mongoose.Schema({ name: String, age: Number }, {
+ strictQuery: true
+ });
+ Character = mongoose.model('Character', schema);
+
+ const query = Character.findOne({
+ $or: [{ notInSchema: { $lt: 'not a number' } }],
+ $and: [{ name: 'abc' }, { age: { $gt: 18 } }, { notInSchema: { $lt: 'not a number' } }],
+ $nor: [{}] // should be kept
+ });
+
+ await query.exec();
+ query.getFilter(); // Mongoose strips `notInSchema` and drops the now-empty `$or`; `$nor: [{}]` is kept
+ // acquit:ignore:start
+ assert.deepEqual(query.getFilter(), { $and: [{ name: 'abc' }, { age: { $gt: 18 } }], $nor: [{}] });
+ // acquit:ignore:end
+ });
+
it('implicit in', async function() {
// Normally wouldn't find anything because `name` is a string, but
// Mongoose automatically inserts `$in`
diff --git a/test/docs/transactions.test.js b/test/docs/transactions.test.js
index de2ecfc9952..2a63f07a984 100644
--- a/test/docs/transactions.test.js
+++ b/test/docs/transactions.test.js
@@ -565,6 +565,57 @@ describe('transactions', function() {
assert.equal(i, 3);
});
+ it('transaction() avoids duplicating atomic operations (gh-14848)', async function() {
+ db.deleteModel(/Test/);
+ const subItemSchema = new mongoose.Schema(
+ {
+ name: { type: String, required: true }
+ },
+ { _id: false }
+ );
+ const itemSchema = new mongoose.Schema(
+ {
+ name: { type: String, required: true },
+ subItems: { type: [subItemSchema], required: true }
+ },
+ { _id: false }
+ );
+ const schema = new mongoose.Schema({
+ items: { type: [itemSchema], required: true }
+ });
+ const Test = db.model('Test', schema);
+
+
+ await Test.createCollection();
+ await Test.deleteMany({});
+
+ const { _id } = await Test.create({
+ items: [
+ { name: 'test1', subItems: [{ name: 'x1' }] },
+ { name: 'test2', subItems: [{ name: 'x2' }] }
+ ]
+ });
+
+ let doc = await Test.findById(_id);
+
+ doc.items.push({ name: 'test3', subItems: [{ name: 'x3' }] });
+
+ let i = 0;
+ await db.transaction(async(session) => {
+ await doc.save({ session });
+ if (++i < 3) {
+ throw new mongoose.mongo.MongoServerError({
+ errorLabels: ['TransientTransactionError']
+ });
+ }
+ });
+
+ assert.equal(i, 3);
+
+ doc = await Test.findById(_id);
+ assert.equal(doc.items.length, 3);
+ });
+
it('doesnt apply schema write concern to transaction operations (gh-11382)', async function() {
db.deleteModel(/Test/);
const Test = db.model('Test', Schema({ status: String }, { writeConcern: { w: 'majority' } }));
diff --git a/test/document.test.js b/test/document.test.js
index 938b76a443c..813b34ebff9 100644
--- a/test/document.test.js
+++ b/test/document.test.js
@@ -8177,6 +8177,38 @@ describe('document', function() {
await person.save();
});
+ it('set() merge option with double nested', async function() {
+ const PersonSchema = new Schema({
+ info: {
+ address: {
+ city: String,
+ country: { type: String, default: 'UK' },
+ postcode: String
+ }
+ }
+ });
+
+ const Person = db.model('Person', PersonSchema);
+
+
+ const person = new Person({
+ info: {
+ address: {
+ country: 'United States',
+ city: 'New York'
+ }
+ }
+ });
+
+ const update = { info: { address: { postcode: '12H' } } };
+
+ person.set(update, undefined, { merge: true });
+
+ assert.equal(person.info.address.city, 'New York');
+ assert.equal(person.info.address.postcode, '12H');
+ assert.equal(person.info.address.country, 'United States');
+ });
+
it('setting single nested subdoc with timestamps (gh-8251)', async function() {
const ActivitySchema = Schema({ description: String }, { timestamps: true });
const RequestSchema = Schema({ activity: ActivitySchema });
@@ -12964,6 +12996,68 @@ describe('document', function() {
);
});
+ it('handles middleware on embedded discriminators on nested path defined using Schema.prototype.discriminator (gh-14961)', async function() {
+ const eventSchema = new Schema(
+ { message: String },
+ { discriminatorKey: 'kind', _id: false }
+ );
+
+ const clickedSchema = new Schema({
+ element: String
+ }, { _id: false });
+
+ // This is the discriminator which we will use to test middleware
+ const purchasedSchema = new Schema({
+ product: String
+ }, { _id: false });
+
+ let eventSchemaPreValidateCalls = 0;
+ let eventSchemaPreSaveCalls = 0;
+ eventSchema.pre('validate', function() {
+ ++eventSchemaPreValidateCalls;
+ });
+ eventSchema.pre('save', function() {
+ ++eventSchemaPreSaveCalls;
+ });
+
+ let purchasedSchemaPreValidateCalls = 0;
+ let purchasedSchemaPreSaveCalls = 0;
+ purchasedSchema.pre('validate', function() {
+ ++purchasedSchemaPreValidateCalls;
+ });
+ purchasedSchema.pre('save', function() {
+ ++purchasedSchemaPreSaveCalls;
+ });
+
+ eventSchema.discriminator('Clicked', clickedSchema);
+ eventSchema.discriminator('Purchased', purchasedSchema);
+
+ const trackSchema = new Schema({
+ event: eventSchema
+ });
+
+ // Test
+
+ const MyModel = db.model('track', trackSchema);
+ const doc = new MyModel({
+ event: {
+ kind: 'Purchased',
+ message: 'Test',
+ product: 'iPhone'
+ }
+ });
+
+ await doc.save();
+ assert.equal(doc.event.message, 'Test');
+ assert.equal(doc.event.kind, 'Purchased');
+ assert.equal(doc.event.product, 'iPhone');
+
+ assert.strictEqual(eventSchemaPreValidateCalls, 1);
+ assert.strictEqual(eventSchemaPreSaveCalls, 1);
+ assert.strictEqual(purchasedSchemaPreValidateCalls, 1);
+ assert.strictEqual(purchasedSchemaPreSaveCalls, 1);
+ });
+
it('handles reusing schema with embedded discriminators defined using Schema.prototype.discriminator (gh-14162)', async function() {
const discriminated = new Schema({
type: { type: Number, required: true }
@@ -13843,6 +13937,230 @@ describe('document', function() {
assert.strictEqual(requiredCalls[0], doc.config.prop);
assert.strictEqual(requiredCalls[1], doc.config.prop);
});
+
+ it('applies toObject() getters to 3 level deep subdocuments (gh-14840) (gh-14835)', async function() {
+ // Define nested schemas
+ const Level3Schema = new mongoose.Schema({
+ property: {
+ type: String,
+ get: (value) => value ? value.toUpperCase() : value
+ }
+ });
+
+ const Level2Schema = new mongoose.Schema({ level3: Level3Schema });
+ const Level1Schema = new mongoose.Schema({ level2: Level2Schema });
+ const MainSchema = new mongoose.Schema({ level1: Level1Schema });
+ const MainModel = db.model('Test', MainSchema);
+
+ const doc = await MainModel.create({
+ level1: {
+ level2: {
+ level3: {
+ property: 'testValue'
+ }
+ }
+ }
+ });
+
+ // Fetch and convert the document to an object with getters applied
+ const result = await MainModel.findById(doc._id);
+ const objectWithGetters = result.toObject({ getters: true, virtuals: false });
+ assert.strictEqual(objectWithGetters.level1.level2.level3.property, 'TESTVALUE');
+ });
+
+ it('handles inserting and saving large document with 10-level deep subdocs (gh-14897)', async function() {
+ const levels = 10;
+
+ let schema = new Schema({ test: { type: String, required: true } });
+ let doc = { test: 'gh-14897' };
+ for (let i = 0; i < levels; ++i) {
+ schema = new Schema({ level: Number, subdocs: [schema] });
+ doc = { level: (levels - i), subdocs: [{ ...doc }, { ...doc }] };
+ }
+
+ const Test = db.model('Test', schema);
+ const savedDoc = await Test.create(doc);
+
+ let cur = savedDoc;
+ for (let i = 0; i < levels - 1; ++i) {
+ cur = cur.subdocs[0];
+ }
+ cur.subdocs[0] = { test: 'updated' };
+ await savedDoc.save();
+ });
+
+ it('avoids flattening objectids on insertMany (gh-14935)', async function() {
+ const TestSchema = new Schema(
+ {
+ professionalId: {
+ type: Schema.Types.ObjectId
+ },
+ firstName: {
+ type: String
+ }
+ },
+ {
+ toObject: { flattenObjectIds: true }
+ }
+ );
+ const Test = db.model('Test', TestSchema);
+
+ const professionalId = new mongoose.Types.ObjectId();
+ await Test.insertMany([{ professionalId, firstName: 'test' }]);
+
+ const doc = await Test.findOne({ professionalId }).lean().orFail();
+ assert.ok(doc.professionalId instanceof mongoose.Types.ObjectId);
+ });
+
+ it('handles buffers stored as EJSON POJO (gh-14911)', async function() {
+ const pdfSchema = new mongoose.Schema({
+ pdfSettings: {
+ type: {
+ _id: false,
+ fileContent: { type: Buffer, required: true },
+ filePreview: { type: Buffer, required: true },
+ fileName: { type: String, required: true }
+ }
+ }
+ });
+ const PdfModel = db.model('Test', pdfSchema);
+
+ const _id = new mongoose.Types.ObjectId();
+ const buf = { $binary: Buffer.from('hello', 'utf8').toString('base64'), $type: '00' };
+ await PdfModel.collection.insertOne({
+ _id,
+ pdfSettings: {
+ fileContent: buf,
+ filePreview: buf,
+ fileName: 'sample.pdf'
+ }
+ });
+
+ const reloaded = await PdfModel.findById(_id);
+ assert.ok(Buffer.isBuffer(reloaded.pdfSettings.fileContent));
+ assert.strictEqual(reloaded.pdfSettings.fileContent.toString('utf8'), 'hello');
+ });
+
+ describe('gh-2306', function() {
+ it('allows defining a virtual on a non-object path', function() {
+ const schema = new mongoose.Schema({ num: Number, str: String, nums: [Number] });
+ schema.path('nums').virtual('last').get(function() {
+ return this[this.length - 1];
+ });
+ schema.virtual('nums.first', { applyToArray: true }).get(function() {
+ return this[0];
+ });
+ schema.virtual('nums.selectedIndex', { applyToArray: true })
+ .get(function() {
+ return this.__selectedIndex;
+ })
+ .set(function(v) {
+ this.__selectedIndex = v;
+ });
+ const M = db.model('gh2306', schema);
+ const m = new M({ num: 2, str: 'a', nums: [1, 2, 3] });
+
+ assert.strictEqual(m.nums.last, 3);
+ assert.strictEqual(m.nums.first, 1);
+
+ assert.strictEqual(m.nums.selectedIndex, undefined);
+ m.nums.selectedIndex = 42;
+ assert.strictEqual(m.nums.__selectedIndex, 42);
+ });
+
+ it('works on document arrays', function() {
+ const schema = new mongoose.Schema({ books: [{ title: String, author: String }] });
+ schema.path('books').virtual('last').get(function() {
+ return this[this.length - 1];
+ });
+ schema.virtual('books.first', { applyToArray: true }).get(function() {
+ return this[0];
+ });
+ const M = db.model('Test', schema);
+ const m = new M({ books: [{ title: 'Casino Royale', author: 'Ian Fleming' }, { title: 'The Man With The Golden Gun', author: 'Ian Fleming' }] });
+
+ assert.strictEqual(m.books.first.title, 'Casino Royale');
+ assert.strictEqual(m.books.last.title, 'The Man With The Golden Gun');
+ });
+ });
+
+ it('clears modified subpaths when setting deeply nested subdoc to null (gh-14952)', async function() {
+ const currentMilestoneSchema = new Schema(
+ {
+ id: { type: String, required: true }
+ },
+ {
+ _id: false
+ }
+ );
+
+ const milestoneSchema = new Schema(
+ {
+ current: {
+ type: currentMilestoneSchema,
+ required: true
+ }
+ },
+ {
+ _id: false
+ }
+ );
+
+ const campaignSchema = new Schema(
+ {
+ milestones: {
+ type: milestoneSchema,
+ required: false
+ }
+ },
+ {
+ _id: false
+ }
+ );
+ const questSchema = new Schema(
+ {
+ campaign: { type: campaignSchema, required: false }
+ },
+ {
+ _id: false
+ }
+ );
+
+ const parentSchema = new Schema({
+ quests: [questSchema]
+ });
+
+ const ParentModel = db.model('Parent', parentSchema);
+ const doc = new ParentModel({
+ quests: [
+ {
+ campaign: {
+ milestones: {
+ current: {
+ id: 'milestone1'
+ }
+ }
+ }
+ }
+ ]
+ });
+
+ await doc.save();
+
+ // Modify nested subpaths, then set the parent subdoc to null
+ doc.quests[0].campaign.milestones.current = {
+ id: 'milestone1'
+ };
+ doc.quests[0].campaign.milestones.current = {
+ id: ''
+ };
+
+ doc.quests[0].campaign.milestones = null;
+ await doc.save();
+
+ const fromDb = await ParentModel.findById(doc._id).orFail();
+ assert.strictEqual(fromDb.quests[0].campaign.milestones, null);
+ });
});
describe('Check if instance function that is supplied in schema option is available', function() {
diff --git a/test/helpers/projection.isExclusive.test.js b/test/helpers/projection.isExclusive.test.js
new file mode 100644
index 00000000000..2fc4a16b990
--- /dev/null
+++ b/test/helpers/projection.isExclusive.test.js
@@ -0,0 +1,12 @@
+'use strict';
+
+const assert = require('assert');
+
+require('../common'); // required for side-effect setup (so that the default driver is set up)
+const isExclusive = require('../../lib/helpers/projection/isExclusive');
+
+describe('isExclusive', function() {
+ it('handles $elemMatch (gh-14893)', function() {
+ assert.strictEqual(isExclusive({ field: { $elemMatch: { test: new Date('2024-06-01') } }, otherProp: 1 }), false);
+ });
+});
diff --git a/test/helpers/projection.isInclusive.test.js b/test/helpers/projection.isInclusive.test.js
new file mode 100644
index 00000000000..3bb93635a50
--- /dev/null
+++ b/test/helpers/projection.isInclusive.test.js
@@ -0,0 +1,12 @@
+'use strict';
+
+const assert = require('assert');
+
+require('../common'); // required for side-effect setup (so that the default driver is set up)
+const isInclusive = require('../../lib/helpers/projection/isInclusive');
+
+describe('isInclusive', function() {
+ it('handles $elemMatch (gh-14893)', function() {
+ assert.strictEqual(isInclusive({ field: { $elemMatch: { test: new Date('2024-06-01') } }, otherProp: 1 }), true);
+ });
+});
diff --git a/test/model.populate.setting.test.js b/test/model.populate.setting.test.js
index bcf072e8d0c..d1c6700e044 100644
--- a/test/model.populate.setting.test.js
+++ b/test/model.populate.setting.test.js
@@ -152,7 +152,7 @@ describe('model: populate:', function() {
assert.equal(doc.fans[6], null);
const _id = construct[id]();
- doc.fans.addToSet(_id);
+ doc.fans.addToSet({ _id });
if (Buffer.isBuffer(_id)) {
assert.equal(doc.fans[7]._id.toString('utf8'), _id.toString('utf8'));
} else {
diff --git a/test/model.populate.test.js b/test/model.populate.test.js
index 7f0fe844eb8..f62e5886c9e 100644
--- a/test/model.populate.test.js
+++ b/test/model.populate.test.js
@@ -2582,6 +2582,91 @@ describe('model: populate:', function() {
assert.strictEqual(doc.parts[0].contents[1].item.url, 'https://youtube.com');
});
+ it('with refPath and array of ids with parent refPath', async function() {
+ const Child = db.model(
+ 'Child',
+ new mongoose.Schema({
+ fetched: Boolean
+ })
+ );
+
+ const Parent = db.model(
+ 'Parent',
+ new mongoose.Schema({
+ docArray: [
+ {
+ type: {
+ type: String,
+ enum: ['Child', 'OtherModel']
+ },
+ ids: [
+ {
+ type: mongoose.Schema.ObjectId,
+ refPath: 'docArray.type'
+ }
+ ]
+ }
+ ]
+ })
+ );
+ await Child.insertMany([
+ { _id: new mongoose.Types.ObjectId('6671a008596112f0729c2045'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('667195f3596112f0728abe24'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('6671bd39596112f072cda69c'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('6672c351596112f072868565'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('66734edd596112f0727304a2'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('66726eff596112f072f8e834'), fetched: true },
+ { _id: new mongoose.Types.ObjectId('667267ff596112f072ed56b1'), fetched: true }
+ ]);
+ const { _id } = await Parent.create(
+ {
+ docArray: [
+ {},
+ {
+ type: 'Child',
+ ids: [
+ new mongoose.Types.ObjectId('6671a008596112f0729c2045'),
+ new mongoose.Types.ObjectId('667195f3596112f0728abe24'),
+ new mongoose.Types.ObjectId('6671bd39596112f072cda69c'),
+ new mongoose.Types.ObjectId('6672c351596112f072868565')
+ ]
+ },
+ {
+ type: 'Child',
+ ids: [new mongoose.Types.ObjectId('66734edd596112f0727304a2')]
+ },
+ {},
+ {
+ type: 'Child',
+ ids: [new mongoose.Types.ObjectId('66726eff596112f072f8e834')]
+ },
+ {},
+ {
+ type: 'Child',
+ ids: [new mongoose.Types.ObjectId('667267ff596112f072ed56b1')]
+ }
+ ]
+ }
+ );
+
+ const doc = await Parent.findById(_id).populate('docArray.ids').orFail();
+ assert.strictEqual(doc.docArray.length, 7);
+ assert.strictEqual(doc.docArray[0].ids.length, 0);
+ assert.strictEqual(doc.docArray[1].ids.length, 4);
+ assert.ok(doc.docArray[1].ids[0].fetched);
+ assert.ok(doc.docArray[1].ids[1].fetched);
+ assert.ok(doc.docArray[1].ids[2].fetched);
+ assert.ok(doc.docArray[1].ids[3].fetched);
+ assert.strictEqual(doc.docArray[2].ids.length, 1);
+ assert.ok(doc.docArray[2].ids[0].fetched);
+ assert.strictEqual(doc.docArray[3].ids.length, 0);
+ assert.strictEqual(doc.docArray[4].ids.length, 1);
+ assert.ok(doc.docArray[4].ids[0].fetched);
+ assert.strictEqual(doc.docArray[5].ids.length, 0);
+ assert.strictEqual(doc.docArray[6].ids.length, 1);
+ assert.ok(doc.docArray[6].ids[0].fetched);
+ });
+
it('with nested nonexistant refPath (gh-6457)', async function() {
const CommentSchema = new Schema({
text: String,
@@ -3556,6 +3641,52 @@ describe('model: populate:', function() {
assert.deepEqual(band.members.map(b => b.name).sort(), ['AA', 'AB']);
});
+ it('match prevents using $where', async function() {
+ const ParentSchema = new Schema({
+ name: String,
+ child: {
+ type: mongoose.Schema.Types.ObjectId,
+ ref: 'Child'
+ },
+ children: [{
+ type: mongoose.Schema.Types.ObjectId,
+ ref: 'Child'
+ }]
+ });
+
+ const ChildSchema = new Schema({
+ name: String
+ });
+ ChildSchema.virtual('parent', {
+ ref: 'Parent',
+ localField: '_id',
+ foreignField: 'parent'
+ });
+
+ const Parent = db.model('Parent', ParentSchema);
+ const Child = db.model('Child', ChildSchema);
+
+ const child = await Child.create({ name: 'Luke' });
+ const parent = await Parent.create({ name: 'Anakin', child: child._id });
+
+ await assert.rejects(
+ () => Parent.findOne().populate({ path: 'child', match: { $where: 'console.log("oops!");' } }),
+ /Cannot use \$where filter with populate\(\) match/
+ );
+ await assert.rejects(
+ () => Parent.find().populate({ path: 'child', match: { $where: 'console.log("oops!");' } }),
+ /Cannot use \$where filter with populate\(\) match/
+ );
+ await assert.rejects(
+ () => parent.populate({ path: 'child', match: { $where: 'console.log("oops!");' } }),
+ /Cannot use \$where filter with populate\(\) match/
+ );
+ await assert.rejects(
+ () => Child.find().populate({ path: 'parent', match: { $where: 'console.log("oops!");' } }),
+ /Cannot use \$where filter with populate\(\) match/
+ );
+ });
+
it('multiple source docs', async function() {
const PersonSchema = new Schema({
name: String,
@@ -11131,4 +11262,67 @@ describe('model: populate:', function() {
}
assert.equal(posts.length, 2);
});
+
+ it('depopulates if pushing ObjectId to a populated array (gh-1635)', async function() {
+ const ParentModel = db.model('Test', mongoose.Schema({
+ name: String,
+ children: [{ type: 'ObjectId', ref: 'Child' }]
+ }));
+ const ChildModel = db.model('Child', mongoose.Schema({ name: String }));
+
+ const children = await ChildModel.create([{ name: 'Luke' }, { name: 'Leia' }]);
+ const newChild = await ChildModel.create({ name: 'Taco' });
+ const { _id } = await ParentModel.create({ name: 'Anakin', children });
+
+ const doc = await ParentModel.findById(_id).populate('children');
+ doc.children.push(newChild._id);
+
+ assert.ok(doc.children[0] instanceof mongoose.Types.ObjectId);
+ assert.ok(doc.children[1] instanceof mongoose.Types.ObjectId);
+ assert.ok(doc.children[2] instanceof mongoose.Types.ObjectId);
+
+ await doc.save();
+
+ const fromDb = await ParentModel.findById(_id);
+ assert.equal(fromDb.children[0].toHexString(), children[0]._id.toHexString());
+ assert.equal(fromDb.children[1].toHexString(), children[1]._id.toHexString());
+ assert.equal(fromDb.children[2].toHexString(), newChild._id.toHexString());
+ });
+
+ it('handles converting uuid documents to strings when calling toObject() (gh-14869)', async function() {
+ const nodeSchema = new Schema({ _id: { type: 'UUID' }, name: 'String' });
+ const rootSchema = new Schema({
+ _id: { type: 'UUID' },
+ status: 'String',
+ node: [{ type: 'UUID', ref: 'Child' }]
+ });
+
+ const Node = db.model('Child', nodeSchema);
+ const Root = db.model('Parent', rootSchema);
+
+ const node = new Node({
+ _id: '65c7953e-c6e9-4c2f-8328-fe2de7df560d',
+ name: 'test'
+ });
+ await node.save();
+
+ const root = new Root({
+ _id: '05c7953e-c6e9-4c2f-8328-fe2de7df560d',
+ status: 'ok',
+ node: [node._id]
+ });
+ await root.save();
+
+ const foundRoot = await Root.findById(root._id).populate('node');
+
+ let doc = foundRoot.toJSON({ getters: true });
+ assert.strictEqual(doc._id, '05c7953e-c6e9-4c2f-8328-fe2de7df560d');
+ assert.strictEqual(doc.node.length, 1);
+ assert.strictEqual(doc.node[0]._id, '65c7953e-c6e9-4c2f-8328-fe2de7df560d');
+
+ doc = foundRoot.toObject({ getters: true });
+ assert.strictEqual(doc._id, '05c7953e-c6e9-4c2f-8328-fe2de7df560d');
+ assert.strictEqual(doc.node.length, 1);
+ assert.strictEqual(doc.node[0]._id, '65c7953e-c6e9-4c2f-8328-fe2de7df560d');
+ });
});
diff --git a/test/model.test.js b/test/model.test.js
index f6943d96fc7..73d2e809ef2 100644
--- a/test/model.test.js
+++ b/test/model.test.js
@@ -10,6 +10,7 @@ const assert = require('assert');
const { once } = require('events');
const random = require('./util').random;
const util = require('./util');
+const model = require('../lib/model');
const mongoose = start.mongoose;
const Schema = mongoose.Schema;
@@ -4124,6 +4125,100 @@ describe('Model', function() {
assert.equal(err.validationErrors[0].errors['num'].name, 'CastError');
});
+ it('handles array filters (gh-14978)', async function() {
+ const embedDiscriminatorSchema = new mongoose.Schema({
+ field1: String
+ });
+
+ const embedSchema = new mongoose.Schema({
+ field: String,
+ key: String
+ }, { discriminatorKey: 'key' });
+ embedSchema.discriminator('Type1', embedDiscriminatorSchema);
+
+ const testSchema = new mongoose.Schema({
+ testArray: [embedSchema]
+ });
+ const TestModel = db.model('Test', testSchema);
+
+ const test = new TestModel({
+ testArray: [{
+ key: 'Type1',
+ field: 'field',
+ field1: 'field1'
+ }]
+ });
+ const r1 = await test.save();
+ assert.equal(r1.testArray[0].field1, 'field1');
+
+ const field1update = 'field1 update';
+ await TestModel.bulkWrite([{
+ updateOne: {
+ filter: { _id: r1._id },
+ update: {
+ $set: {
+ 'testArray.$[element].field1': field1update,
+ 'testArray.$[element].nonexistentProp': field1update
+ }
+ },
+ arrayFilters: [
+ {
+ 'element._id': r1.testArray[0]._id,
+ 'element.key': 'Type1'
+ }
+ ]
+ }
+ }]);
+ const r2 = await TestModel.findById(r1._id).lean();
+ assert.equal(r2.testArray[0].field1, field1update);
+ assert.strictEqual(r2.testArray[0].nonexistentProp, undefined);
+ });
+
+ it('handles overwriteDiscriminatorKey (gh-15040)', async function() {
+ const dSchema1 = new mongoose.Schema({
+ field1: String
+ });
+ const dSchema2 = new mongoose.Schema({
+ field2: String
+ });
+ const baseSchema = new mongoose.Schema({
+ field: String,
+ key: String
+ }, { discriminatorKey: 'key' });
+ const type1Key = 'Type1';
+ const type2Key = 'Type2';
+
+ baseSchema.discriminator(type1Key, dSchema1);
+ baseSchema.discriminator(type2Key, dSchema2);
+
+ const TestModel = db.model('Test', baseSchema);
+
+ const test = new TestModel({
+ field: 'base field',
+ key: type1Key,
+ field1: 'field1'
+ });
+ const r1 = await test.save();
+ assert.equal(r1.field1, 'field1');
+ assert.equal(r1.key, type1Key);
+
+ const field2 = 'field2';
+ await TestModel.bulkWrite([{
+ updateOne: {
+ filter: { _id: r1._id },
+ update: {
+ key: type2Key,
+ field2
+ },
+ overwriteDiscriminatorKey: true
+ }
+ }]);
+
+ const r2 = await TestModel.findById(r1._id);
+ assert.equal(r2.key, type2Key);
+ assert.equal(r2.field2, field2);
+ });
+
it('with child timestamps and array filters (gh-7032)', async function() {
const childSchema = new Schema({ name: String }, { timestamps: true });
@@ -4961,6 +5056,31 @@ describe('Model', function() {
assert.strictEqual(indexes[1].background, false);
});
+ it('syncIndexes() supports hideIndexes (gh-14868)', async function() {
+ const opts = { autoIndex: false };
+ const schema = new Schema({ name: String }, opts);
+ schema.index({ name: 1 });
+
+ let M = db.model('Test', schema);
+ await M.syncIndexes({});
+
+ let indexes = await M.listIndexes();
+ assert.deepEqual(indexes[1].key, { name: 1 });
+ assert.ok(!indexes[1].hidden);
+
+ db.deleteModel(/Test/);
+ M = db.model('Test', new Schema({ name: String }, opts));
+ await M.syncIndexes({ hideIndexes: true });
+ indexes = await M.listIndexes();
+ assert.deepEqual(indexes[1].key, { name: 1 });
+ assert.ok(indexes[1].hidden);
+
+ await M.syncIndexes({});
+ indexes = await M.listIndexes();
+ assert.equal(indexes.length, 1);
+ assert.deepEqual(indexes[0].key, { _id: 1 });
+ });
+
it('should not drop a text index on .syncIndexes() call (gh-10850)', async function() {
const collation = { collation: { locale: 'simple' } };
const someSchema = new Schema({
@@ -5861,6 +5981,83 @@ describe('Model', function() {
});
+ it('custom statics that overwrite aggregate functions dont get hooks by default (gh-14903)', async function() {
+
+ const schema = new Schema({ name: String });
+
+ schema.statics.aggregate = function(pipeline) {
+ return model.aggregate.apply(this, [pipeline]);
+ };
+
+ let called = 0;
+ schema.pre('aggregate', function(next) {
+ ++called;
+ next();
+ });
+ const Model = db.model('Test', schema);
+
+ await Model.create({ name: 'foo' });
+
+ const res = await Model.aggregate([
+ {
+ $match: {
+ name: 'foo'
+ }
+ }
+ ]);
+
+ assert.ok(res[0].name);
+ assert.equal(called, 1);
+ });
+
+ it('custom statics that overwrite model functions dont get hooks by default', async function() {
+
+ const schema = new Schema({ name: String });
+
+ schema.statics.insertMany = function(docs) {
+ return model.insertMany.apply(this, [docs]);
+ };
+
+ let called = 0;
+ schema.pre('insertMany', function(next) {
+ ++called;
+ next();
+ });
+ const Model = db.model('Test', schema);
+
+ const res = await Model.insertMany([
+ { name: 'foo' },
+ { name: 'boo' }
+ ]);
+
+ assert.ok(res[0].name);
+ assert.ok(res[1].name);
+ assert.equal(called, 1);
+ });
+
+ it('custom statics that overwrite document functions dont get hooks by default', async function() {
+
+ const schema = new Schema({ name: String });
+
+ schema.statics.save = function() {
+ return 'foo';
+ };
+
+ let called = 0;
+ schema.pre('save', function(next) {
+ ++called;
+ next();
+ });
+
+ const Model = db.model('Test', schema);
+
+ const doc = await Model.save();
+
+ assert.ok(doc);
+ assert.equal(doc, 'foo');
+ assert.equal(called, 0);
+ });
+
it('error handling middleware passes saved doc (gh-7832)', async function() {
const schema = new Schema({ _id: Number });
@@ -6973,6 +7170,74 @@ describe('Model', function() {
assert.ok(err == null);
});
+ it('should error if no documents were inserted or updated (gh-14763)', async function() {
+ const fooSchema = new mongoose.Schema({
+ bar: { type: Number }
+ }, { optimisticConcurrency: true });
+ const TestModel = db.model('Test', fooSchema);
+
+ const foo = await TestModel.create({
+ bar: 0
+ });
+
+ // update 1
+ foo.bar = 1;
+ await foo.save();
+
+ // parallel update
+ const fooCopy = await TestModel.findById(foo._id);
+ fooCopy.bar = 99;
+ await fooCopy.save();
+
+ foo.bar = 2;
+ const err = await TestModel.bulkSave([foo]).then(() => null, err => err);
+ assert.equal(err.name, 'MongooseBulkSaveIncompleteError');
+ assert.equal(err.numDocumentsNotUpdated, 1);
+ });
+ it('should error if not all documents were inserted or updated (gh-14763)', async function() {
+ const fooSchema = new mongoose.Schema({
+ bar: { type: Number }
+ }, { optimisticConcurrency: true });
+ const TestModel = db.model('Test', fooSchema);
+
+ const errorDoc = await TestModel.create({ bar: 0 });
+ const okDoc = await TestModel.create({ bar: 0 });
+
+ // update 1
+ errorDoc.bar = 1;
+ await errorDoc.save();
+
+ // parallel update
+ const errorDocCopy = await TestModel.findById(errorDoc._id);
+ errorDocCopy.bar = 99;
+ await errorDocCopy.save();
+
+ errorDoc.bar = 2;
+ okDoc.bar = 2;
+ const err = await TestModel.bulkSave([errorDoc, okDoc]).then(() => null, err => err);
+ assert.equal(err.name, 'MongooseBulkSaveIncompleteError');
+ assert.equal(err.numDocumentsNotUpdated, 1);
+
+ const updatedOkDoc = await TestModel.findById(okDoc._id);
+ assert.equal(updatedOkDoc.bar, 2);
+ });
+ it('should error if there is a validation error', async function() {
+ const fooSchema = new mongoose.Schema({
+ bar: { type: Number }
+ }, { optimisticConcurrency: true });
+ const TestModel = db.model('Test', fooSchema);
+
+ const docs = [
+ new TestModel({ bar: 42 }),
+ new TestModel({ bar: 'taco' })
+ ];
+ const err = await TestModel.bulkSave(docs).then(() => null, err => err);
+ assert.equal(err.name, 'ValidationError');
+
+ // bulkSave() does not save any documents if any documents fail validation
+ const fromDb = await TestModel.find();
+ assert.equal(fromDb.length, 0);
+ });
it('Using bulkSave should not trigger an error (gh-11071)', async function() {
const pairSchema = mongoose.Schema({
@@ -7713,6 +7978,315 @@ describe('Model', function() {
docs = await User.find();
assert.deepStrictEqual(docs.map(doc => doc.age), [12, 12]);
});
+
+ describe('applyVirtuals', function() {
+ it('handles basic top-level virtuals', async function() {
+ const userSchema = new Schema({
+ name: String
+ });
+ userSchema.virtual('lowercase').get(function() {
+ return this.name.toLowerCase();
+ });
+ userSchema.virtual('uppercase').get(function() {
+ return this.name.toUpperCase();
+ });
+ const User = db.model('User', userSchema);
+
+ const res = User.applyVirtuals({ name: 'Taco' });
+ assert.equal(res.name, 'Taco');
+ assert.equal(res.lowercase, 'taco');
+ assert.equal(res.uppercase, 'TACO');
+ });
+
+ it('handles virtuals in subdocuments', async function() {
+ const userSchema = new Schema({
+ name: String
+ });
+ userSchema.virtual('lowercase').get(function() {
+ return this.name.toLowerCase();
+ });
+ userSchema.virtual('uppercase').get(function() {
+ return this.name.toUpperCase();
+ });
+ const groupSchema = new Schema({
+ name: String,
+ leader: userSchema,
+ members: [userSchema]
+ });
+ const Group = db.model('Group', groupSchema);
+
+ const res = Group.applyVirtuals({
+ name: 'Microsoft',
+ leader: { name: 'Bill' },
+ members: [{ name: 'John' }, { name: 'Steve' }]
+ });
+ assert.equal(res.name, 'Microsoft');
+ assert.equal(res.leader.name, 'Bill');
+ assert.equal(res.leader.uppercase, 'BILL');
+ assert.equal(res.leader.lowercase, 'bill');
+ assert.equal(res.members[0].name, 'John');
+ assert.equal(res.members[0].uppercase, 'JOHN');
+ assert.equal(res.members[0].lowercase, 'john');
+ assert.equal(res.members[1].name, 'Steve');
+ assert.equal(res.members[1].uppercase, 'STEVE');
+ assert.equal(res.members[1].lowercase, 'steve');
+ });
+
+ it('handles virtuals on nested paths', async function() {
+ const userSchema = new Schema({
+ name: {
+ first: String,
+ last: String
+ }
+ });
+ userSchema.virtual('name.firstUpper').get(function() {
+ return this.name.first.toUpperCase();
+ });
+ userSchema.virtual('name.lastLower').get(function() {
+ return this.name.last.toLowerCase();
+ });
+ const User = db.model('User', userSchema);
+
+ const res = User.applyVirtuals({
+ name: {
+ first: 'Bill',
+ last: 'Gates'
+ }
+ });
+ assert.equal(res.name.first, 'Bill');
+ assert.equal(res.name.last, 'Gates');
+ assert.equal(res.name.firstUpper, 'BILL');
+ assert.equal(res.name.lastLower, 'gates');
+ });
+
+ it('supports passing an array of virtuals to apply', async function() {
+ const userSchema = new Schema({
+ name: {
+ first: String,
+ last: String
+ }
+ });
+ userSchema.virtual('fullName').get(function() {
+ return `${this.name.first} ${this.name.last}`;
+ });
+ userSchema.virtual('name.firstUpper').get(function() {
+ return this.name.first.toUpperCase();
+ });
+ userSchema.virtual('name.lastLower').get(function() {
+ return this.name.last.toLowerCase();
+ });
+ const User = db.model('User', userSchema);
+
+ let res = User.applyVirtuals({
+ name: {
+ first: 'Bill',
+ last: 'Gates'
+ }
+ }, ['fullName', 'name.firstUpper']);
+ assert.strictEqual(res.name.first, 'Bill');
+ assert.strictEqual(res.name.last, 'Gates');
+ assert.strictEqual(res.fullName, 'Bill Gates');
+ assert.strictEqual(res.name.firstUpper, 'BILL');
+ assert.strictEqual(res.name.lastLower, undefined);
+
+ res = User.applyVirtuals({
+ name: {
+ first: 'Bill',
+ last: 'Gates'
+ }
+ }, ['name.lastLower']);
+ assert.strictEqual(res.name.first, 'Bill');
+ assert.strictEqual(res.name.last, 'Gates');
+ assert.strictEqual(res.fullName, undefined);
+ assert.strictEqual(res.name.firstUpper, undefined);
+ assert.strictEqual(res.name.lastLower, 'gates');
+ });
+
+ it('sets populate virtuals to `null` if `justOne`', async function() {
+ const userSchema = new Schema({
+ name: {
+ first: String,
+ last: String
+ },
+ friendId: {
+ type: 'ObjectId'
+ }
+ });
+ userSchema.virtual('fullName').get(function() {
+ return `${this.name.first} ${this.name.last}`;
+ });
+ userSchema.virtual('friend', {
+ ref: 'User',
+ localField: 'friendId',
+ foreignField: '_id',
+ justOne: true
+ });
+ const User = db.model('User', userSchema);
+
+ const friendId = new mongoose.Types.ObjectId();
+ const res = User.applyVirtuals({
+ name: {
+ first: 'Bill',
+ last: 'Gates'
+ },
+ friendId
+ });
+ assert.strictEqual(res.name.first, 'Bill');
+ assert.strictEqual(res.name.last, 'Gates');
+ assert.strictEqual(res.fullName, 'Bill Gates');
+ assert.strictEqual(res.friend, null);
+ });
+ });
+
+ describe('applyTimestamps', function() {
+ it('handles basic top-level timestamps', async function() {
+ const startTime = new Date();
+ const userSchema = new Schema({
+ name: String
+ }, { timestamps: true });
+ const User = db.model('User', userSchema);
+
+ const obj = { name: 'test' };
+ User.applyTimestamps(obj);
+ assert.equal(obj.name, 'test');
+ assert.ok(obj.createdAt instanceof Date);
+ assert.ok(obj.updatedAt instanceof Date);
+ assert.ok(obj.createdAt.valueOf() >= startTime.valueOf());
+ assert.ok(obj.updatedAt.valueOf() >= startTime.valueOf());
+ });
+
+ it('no-op if timestamps not set', async function() {
+ const userSchema = new Schema({
+ name: String
+ });
+ const User = db.model('User', userSchema);
+
+ const obj = { name: 'test' };
+ User.applyTimestamps(obj);
+ assert.equal(obj.name, 'test');
+ assert.ok(!('createdAt' in obj));
+ assert.ok(!('updatedAt' in obj));
+ });
+
+ it('handles custom timestamp property names', async function() {
+ const startTime = new Date();
+ const userSchema = new Schema({
+ name: String
+ }, { timestamps: { createdAt: 'createdOn', updatedAt: 'updatedOn' } });
+ const User = db.model('User', userSchema);
+
+ const obj = { name: 'test' };
+ User.applyTimestamps(obj);
+ assert.equal(obj.name, 'test');
+ assert.ok(obj.createdOn instanceof Date);
+ assert.ok(obj.updatedOn instanceof Date);
+ assert.ok(obj.createdOn.valueOf() >= startTime.valueOf());
+ assert.ok(obj.updatedOn.valueOf() >= startTime.valueOf());
+ assert.ok(!('createdAt' in obj));
+ assert.ok(!('updatedAt' in obj));
+ });
+
+ it('applies timestamps to subdocs', async function() {
+ const startTime = new Date();
+ const userSchema = new Schema({
+ name: String,
+ posts: [new Schema({
+ title: String,
+ content: String
+ }, { timestamps: true })],
+ address: new Schema({
+ city: String,
+ country: String
+ }, { timestamps: true })
+ }, { timestamps: true });
+ const User = db.model('User', userSchema);
+
+ const obj = {
+ name: 'test',
+ posts: [{ title: 'Post 1', content: 'Content 1' }],
+ address: { city: 'New York', country: 'USA' }
+ };
+ User.applyTimestamps(obj);
+ assert.equal(obj.name, 'test');
+ assert.ok(obj.createdAt instanceof Date);
+ assert.ok(obj.updatedAt instanceof Date);
+ assert.ok(obj.createdAt.valueOf() >= startTime.valueOf());
+ assert.ok(obj.updatedAt.valueOf() >= startTime.valueOf());
+ assert.ok(obj.posts[0].createdAt instanceof Date);
+ assert.ok(obj.posts[0].updatedAt instanceof Date);
+ assert.ok(obj.address.createdAt instanceof Date);
+ assert.ok(obj.address.updatedAt instanceof Date);
+ });
+
+ it('supports isUpdate and currentTime options', async function() {
+ const userSchema = new Schema({
+ name: String,
+ post: new Schema({
+ title: String,
+ content: String
+ }, { timestamps: true })
+ }, { timestamps: true });
+ const User = db.model('User', userSchema);
+
+ const obj = {
+ name: 'test',
+ post: { title: 'Post 1', content: 'Content 1' }
+ };
+ User.applyTimestamps(obj, { isUpdate: true, currentTime: () => new Date('2023-06-01T18:00:00.000Z') });
+ assert.equal(obj.name, 'test');
+ assert.ok(!('createdAt' in obj));
+ assert.ok(obj.updatedAt instanceof Date);
+ assert.equal(obj.updatedAt.valueOf(), new Date('2023-06-01T18:00:00.000Z').valueOf());
+ assert.ok(!('createdAt' in obj.post));
+ assert.equal(obj.post.updatedAt.valueOf(), new Date('2023-06-01T18:00:00.000Z').valueOf());
+ });
+ });
+
+ describe('diffIndexes()', function() {
+ it('avoids trying to drop timeseries collections (gh-14984)', async function() {
+ const version = await start.mongodVersion();
+ if (version[0] < 5) {
+ this.skip();
+ return;
+ }
+
+ const schema = new mongoose.Schema(
+ {
+ time: {
+ type: Date
+ },
+ deviceId: {
+ type: String
+ }
+ },
+ {
+ timeseries: {
+ timeField: 'time',
+ metaField: 'deviceId',
+ granularity: 'seconds'
+ },
+ autoCreate: false
+ }
+ );
+
+ const TestModel = db.model(
+ 'TimeSeriesTest',
+ schema,
+ 'gh14984'
+ );
+
+ await db.dropCollection('gh14984').catch(err => {
+ if (err.codeName === 'NamespaceNotFound') {
+ return;
+ }
+ throw err;
+ });
+ await TestModel.createCollection();
+
+ const { toDrop } = await TestModel.diffIndexes();
+ assert.deepStrictEqual(toDrop, []);
+ });
+ });
});
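The `applyVirtuals` and `applyTimestamps` statics exercised above are mainly useful for plain objects that never go through document hydration, such as `lean()` results. A minimal sketch, assuming a `User` model with the same `fullName` virtual as in the tests above:

    // lean() returns POJOs, so virtuals are not attached automatically.
    const users = await User.find().lean();
    const withVirtuals = users.map(user => User.applyVirtuals(user));
    console.log(withVirtuals[0].fullName); // e.g. 'Bill Gates'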
diff --git a/test/model.updateOne.test.js b/test/model.updateOne.test.js
index e9ae3ce43a5..e1fdcef1447 100644
--- a/test/model.updateOne.test.js
+++ b/test/model.updateOne.test.js
@@ -1078,9 +1078,29 @@ describe('model: updateOne:', function() {
const Model = db.model('Test', schema);
const update = { $rename: { foo: 'bar' } };
- await Model.create({ foo: Date.now() });
- const res = await Model.updateOne({}, update, { multi: true });
- assert.equal(res.modifiedCount, 1);
+ const foo = Date.now();
+ const { _id } = await Model.create({ foo });
+ await Model.updateOne({}, update);
+ const doc = await Model.findById(_id);
+ assert.equal(doc.bar.valueOf(), foo.valueOf());
+ assert.equal(doc.foo, undefined);
+ });
+
+ it('throws CastError if $rename fails to cast to string (gh-1845)', async function() {
+ const schema = new Schema({ foo: Date, bar: Date });
+ const Model = db.model('Test', schema);
+
+ let err = await Model.updateOne({}, { $rename: { foo: { prop: 'baz' } } }).then(() => null, err => err);
+ assert.equal(err.name, 'CastError');
+ assert.ok(err.message.includes('foo.$rename'));
+
+ err = await Model.updateOne({}, { $rename: { foo: null } }).then(() => null, err => err);
+ assert.equal(err.name, 'CastError');
+ assert.ok(err.message.includes('foo.$rename'));
+
+ err = await Model.updateOne({}, { $rename: { foo: undefined } }).then(() => null, err => err);
+ assert.equal(err.name, 'CastError');
+ assert.ok(err.message.includes('foo.$rename'));
});
it('allows objects with positional operator (gh-3185)', async function() {
@@ -2506,6 +2526,29 @@ describe('model: updateOne: ', function() {
assert.ok(doc.createdAt.valueOf() >= start);
});
+ it('overwriting immutable createdAt (gh-8619)', async function() {
+ const start = new Date().valueOf();
+ const schema = Schema({
+ createdAt: {
+ type: mongoose.Schema.Types.Date,
+ immutable: true
+ },
+ name: String
+ }, { timestamps: true });
+
+ const Model = db.model('Test', schema);
+
+ await Model.create({ name: 'gh-8619' });
+ let doc = await Model.collection.findOne({ name: 'gh-8619' });
+ assert.ok(doc.createdAt.valueOf() >= start);
+
+ const createdAt = new Date('2011-06-01');
+ assert.ok(createdAt.valueOf() < start.valueOf());
+ await Model.updateOne({ _id: doc._id }, { name: 'gh-8619 update', createdAt }, { overwriteImmutable: true, timestamps: false });
+ doc = await Model.collection.findOne({ name: 'gh-8619 update' });
+ assert.equal(doc.createdAt.valueOf(), createdAt.valueOf());
+ });
+
it('conditional immutable (gh-8001)', async function() {
const schema = Schema({
test: {
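The `overwriteImmutable` option exercised in the gh-8619 test above is opt-in per operation. A sketch of the intended call pattern, assuming the same schema with an immutable `createdAt`:

    await Model.updateOne(
      { name: 'gh-8619' },
      { createdAt: new Date('2011-06-01') },          // normally stripped because createdAt is immutable
      { overwriteImmutable: true, timestamps: false } // opt in; timestamps: false so this backfill does not touch updatedAt
    );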
diff --git a/test/query.cursor.test.js b/test/query.cursor.test.js
index d80264c5f2d..e7265be1d06 100644
--- a/test/query.cursor.test.js
+++ b/test/query.cursor.test.js
@@ -4,6 +4,7 @@
'use strict';
+const { once } = require('events');
const start = require('./common');
const assert = require('assert');
@@ -920,6 +921,34 @@ describe('QueryCursor', function() {
assert.ok(cursor.cursor);
assert.equal(driverCursor, cursor.cursor);
});
+
+ it('handles destroy() (gh-14966)', async function() {
+ db.deleteModel(/Test/);
+ const TestModel = db.model('Test', mongoose.Schema({ name: String }));
+
+ const stream = await TestModel.find().cursor();
+ await once(stream, 'cursor');
+ assert.ok(!stream.cursor.closed);
+
+ stream.destroy();
+
+ await once(stream.cursor, 'close');
+ assert.ok(stream.destroyed);
+ assert.ok(stream.cursor.closed);
+ });
+
+ it('handles destroy() before cursor is created (gh-14966)', async function() {
+ db.deleteModel(/Test/);
+ const TestModel = db.model('Test', mongoose.Schema({ name: String }));
+
+ const stream = await TestModel.find().cursor();
+ assert.ok(!stream.cursor);
+ stream.destroy();
+
+ await once(stream, 'cursor');
+ assert.ok(stream.destroyed);
+ assert.ok(stream.cursor.closed);
+ });
});
async function delay(ms) {
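Outside the tests, `destroy()` is the way to bail out of a cursor early without draining it. A short sketch based on the doc comment added in types/cursor.d.ts later in this diff:

    const cursor = Model.find().cursor();
    const firstDoc = await cursor.next();
    // Done early: destroy() closes the underlying driver cursor, and any
    // later next() call will error instead of fetching more documents.
    cursor.destroy();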
diff --git a/test/query.test.js b/test/query.test.js
index a3de50044e3..bca5f706cfd 100644
--- a/test/query.test.js
+++ b/test/query.test.js
@@ -3520,6 +3520,21 @@ describe('Query', function() {
assert.ifError(q.error());
assert.deepEqual(q._conditions, { username: 'val', pwd: { $gt: null } });
});
+
+ it('sanitizeFilter disables implicit $in (gh-14657)', function() {
+ const schema = new mongoose.Schema({
+ name: {
+ type: String
+ }
+ });
+ const Test = db.model('Test', schema);
+
+ const q = Test.find({ name: ['foobar'] }).setOptions({ sanitizeFilter: true });
+ q._castConditions();
+ assert.ok(q.error());
+ assert.equal(q.error().name, 'CastError');
+ });
+
it('should not error when $not is used with $size (gh-10716)', async function() {
const barSchema = Schema({
bar: String
@@ -4339,4 +4354,62 @@ describe('Query', function() {
await Person.find({ $and: filter });
assert.deepStrictEqual(filter, [{ name: 'Me', age: '20' }, { name: 'You', age: '50' }]);
});
+
+ describe('schemaLevelProjections (gh-11474)', function() {
+ it('disables schema-level select: false', async function() {
+ const userSchema = new Schema({
+ email: { type: String, required: true },
+ passwordHash: { type: String, select: false, required: true }
+ });
+ const UserModel = db.model('User', userSchema);
+
+ const { _id } = await UserModel.create({ email: 'test', passwordHash: 'gh-11474' });
+
+ const doc = await UserModel.findById(_id).orFail().schemaLevelProjections(false);
+ assert.strictEqual(doc.email, 'test');
+ assert.strictEqual(doc.passwordHash, 'gh-11474');
+ });
+
+ it('disables schema-level select: true', async function() {
+ const userSchema = new Schema({
+ email: { type: String, required: true, select: true },
+ otherProp: String
+ });
+ const UserModel = db.model('User', userSchema);
+
+ const { _id } = await UserModel.create({ email: 'test', otherProp: 'gh-11474 select true' });
+
+ const doc = await UserModel.findById(_id).select('otherProp').orFail().schemaLevelProjections(false);
+ assert.strictEqual(doc.email, undefined);
+ assert.strictEqual(doc.otherProp, 'gh-11474 select true');
+ });
+
+ it('works via setOptions()', async function() {
+ const userSchema = new Schema({
+ email: { type: String, required: true },
+ passwordHash: { type: String, select: false, required: true }
+ });
+ const UserModel = db.model('User', userSchema);
+
+ const { _id } = await UserModel.create({ email: 'test', passwordHash: 'gh-11474' });
+
+ const doc = await UserModel.findById(_id).orFail().setOptions({ schemaLevelProjections: false });
+ assert.strictEqual(doc.email, 'test');
+ assert.strictEqual(doc.passwordHash, 'gh-11474');
+ });
+
+ it('not disabled via truthy value', async function() {
+ const userSchema = new Schema({
+ email: { type: String, required: true },
+ passwordHash: { type: String, select: false, required: true }
+ });
+ const UserModel = db.model('User', userSchema);
+
+ const { _id } = await UserModel.create({ email: 'test', passwordHash: 'gh-11474' });
+
+ const doc = await UserModel.findById(_id).orFail().schemaLevelProjections(true);
+ assert.strictEqual(doc.email, 'test');
+ assert.strictEqual(doc.passwordHash, undefined);
+ });
+ });
});
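For context on the gh-14657 test above: `sanitizeFilter` treats filter values as data rather than operators, which is also why the implicit array-to-`$in` cast turns into a CastError. A sketch of typical usage with untrusted input; `untrustedBody` and the field names are illustrative:

    const filter = JSON.parse(untrustedBody); // e.g. '{ "hashedPassword": { "$ne": null } }'
    // Nested objects with $-prefixed keys are wrapped in $eq, so the injected
    // selector matches nothing instead of matching every user.
    await User.find(filter).setOptions({ sanitizeFilter: true });

    // Operators the application builds itself can be marked as trusted:
    await User.find({ age: mongoose.trusted({ $gte: 18 }) }).setOptions({ sanitizeFilter: true });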
diff --git a/test/schematype.test.js b/test/schematype.test.js
index ad8367d0f61..725e21966a4 100644
--- a/test/schematype.test.js
+++ b/test/schematype.test.js
@@ -315,4 +315,13 @@ describe('schematype', function() {
/password must be at least six characters/
);
});
+
+ it('supports getEmbeddedSchemaType() (gh-8389)', function() {
+ const schema = new Schema({ name: String, tags: [String] });
+ assert.strictEqual(schema.path('name').getEmbeddedSchemaType(), undefined);
+ const schemaType = schema.path('tags').getEmbeddedSchemaType();
+ assert.ok(schemaType);
+ assert.equal(schemaType.instance, 'String');
+ assert.equal(schemaType.path, 'tags');
+ });
});
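`getEmbeddedSchemaType()` is aimed at schema introspection tooling. A small sketch of the behavior the test above asserts (array paths report their element caster, scalar paths report `undefined`):

    const schema = new Schema({ scores: [Number], title: String });
    const element = schema.path('scores').getEmbeddedSchemaType();
    console.log(element?.instance); // 'Number'
    console.log(schema.path('title').getEmbeddedSchemaType()); // undefined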
diff --git a/test/types/check-types-filename.js b/test/types/check-types-filename.js
index 8dc0f3f6de8..303c0b05dd0 100644
--- a/test/types/check-types-filename.js
+++ b/test/types/check-types-filename.js
@@ -18,7 +18,7 @@ const checkFolder = (folder) => {
}
continue;
} else {
- console.error('File ' + entry + ' is not having a valid file-extension.\n');
+ console.error('File ' + entry + ' does not have a valid extension, must be .d.ts or .gitignore.\n');
process.exit(1);
}
}
diff --git a/test/types/document.test.ts b/test/types/document.test.ts
index 84451edf0f2..19ca1083296 100644
--- a/test/types/document.test.ts
+++ b/test/types/document.test.ts
@@ -359,3 +359,67 @@ function gh13738() {
expectType(person.get('dob'));
expectType<{ theme: string; alerts: { sms: boolean } }>(person.get('settings'));
}
+
+async function gh12959() {
+ const subdocSchema = new Schema({ foo: { type: 'string', required: true } });
+
+ const schema = new Schema({
+ subdocArray: { type: [subdocSchema], required: true }
+ });
+
+ const Model = model('test', schema);
+
+ const doc = await Model.findById('id').orFail();
+ expectType<Types.ObjectId>(doc._id);
+ expectType<number>(doc.__v);
+
+ expectError(doc.subdocArray[0].__v);
+}
+
+async function gh14876() {
+ type CarObjectInterface = {
+ make: string;
+ model: string;
+ year: number;
+ owner: Types.ObjectId;
+ };
+ const carSchema = new Schema({
+ make: { type: String, required: true },
+ model: { type: String, required: true },
+ year: { type: Number, required: true },
+ owner: { type: Schema.Types.ObjectId, ref: 'User' }
+ });
+
+ type UserObjectInterface = {
+ name: string;
+ age: number;
+ };
+ const userSchema = new Schema({
+ name: String,
+ age: Number
+ });
+
+ const Car = model('Car', carSchema);
+ const User = model('User', userSchema);
+
+ const user = await User.create({ name: 'John', age: 25 });
+ const car = await Car.create({
+ make: 'Toyota',
+ model: 'Camry',
+ year: 2020,
+ owner: user._id
+ });
+
+ const populatedCar = await Car.findById(car._id)
+ .populate<{ owner: UserObjectInterface }>('owner')
+ .exec();
+
+ if (!populatedCar) return;
+
+ console.log(populatedCar.owner.name); // outputs John
+
+ const depopulatedCar = populatedCar.depopulate<{ owner: Types.ObjectId }>('owner');
+
+ expectType<UserObjectInterface>(populatedCar.owner);
+ expectType<Types.ObjectId>(depopulatedCar.owner);
+}
diff --git a/test/types/inferrawdoctype.test.ts b/test/types/inferrawdoctype.test.ts
new file mode 100644
index 00000000000..7d162b03975
--- /dev/null
+++ b/test/types/inferrawdoctype.test.ts
@@ -0,0 +1,25 @@
+import { InferRawDocType } from 'mongoose';
+import { expectType, expectError } from 'tsd';
+
+function gh14839() {
+ const schemaDefinition = {
+ email: {
+ type: String,
+ trim: true,
+ required: true,
+ unique: true,
+ lowercase: true
+ },
+ password: {
+ type: String,
+ required: true
+ },
+ dateOfBirth: {
+ type: Date,
+ required: true
+ }
+ };
+
+ type UserType = InferRawDocType<typeof schemaDefinition>;
+ expectType<{ email: string, password: string, dateOfBirth: Date }>({} as UserType);
+}
diff --git a/test/types/models.test.ts b/test/types/models.test.ts
index fbe411225f0..2e7c536eb33 100644
--- a/test/types/models.test.ts
+++ b/test/types/models.test.ts
@@ -14,8 +14,8 @@ import mongoose, {
UpdateWriteOpResult,
AggregateOptions,
WithLevel1NestedPaths,
- NestedPaths,
- InferSchemaType
+ InferSchemaType,
+ DeleteResult
} from 'mongoose';
import { expectAssignable, expectError, expectType } from 'tsd';
import { AutoTypedSchemaType, autoTypedSchema } from './schema.test';
@@ -216,7 +216,7 @@ function find() {
Project.find({ name: 'Hello' });
// just callback; this is no longer supported on .find()
- expectError(Project.find((error: CallbackError, result: IProject[]) => console.log(error, result)));
+ Project.find((error: CallbackError, result: IProject[]) => console.log(error, result));
// filter + projection
Project.find({}, undefined);
@@ -514,7 +514,7 @@ function gh12100() {
function modelRemoveOptions() {
const cmodel = model('Test', new Schema());
- cmodel.deleteOne({}, {});
+ const res: DeleteResult = await cmodel.deleteOne({}, {});
}
async function gh12286() {
@@ -623,9 +623,9 @@ async function gh13151() {
const TestModel = model('Test', TestSchema);
const test = await TestModel.findOne().lean();
- expectType(test);
+ expectType(test);
if (!test) return;
- expectType(test);
+ expectType(test);
}
function gh13206() {
@@ -661,7 +661,7 @@ async function gh13705() {
const schema = new Schema({ name: String });
const TestModel = model('Test', schema);
- type ExpectedLeanDoc = (mongoose.FlattenMaps<{ name?: string | null }> & { _id: mongoose.Types.ObjectId });
+ type ExpectedLeanDoc = (mongoose.FlattenMaps<{ name?: string | null }> & { _id: mongoose.Types.ObjectId } & { __v: number });
const findByIdRes = await TestModel.findById('0'.repeat(24), undefined, { lean: true });
expectType(findByIdRes);
@@ -977,29 +977,3 @@ function testWithLevel1NestedPaths() {
'foo.one': string | null | undefined
}>({} as Test2);
}
-
-function gh14764TestFilterQueryRestrictions() {
- const TestModel = model<{ validKey: number }>('Test', new Schema({}));
- // A key not in the schema should be invalid
- expectError(TestModel.find({ invalidKey: 0 }));
- // A key not in the schema should be invalid for simple root operators
- expectError(TestModel.find({ $and: [{ invalidKey: 0 }] }));
-
- // Any "nested" keys should be valid
- TestModel.find({ 'validKey.subkey': 0 });
-
- // And deeply "nested" keys should be valid
- TestModel.find({ 'validKey.deep.nested.key': 0 });
- TestModel.find({ validKey: { deep: { nested: { key: 0 } } } });
-
- // Any Query should be accepted as the root argument (due to merge support)
- TestModel.find(TestModel.find());
- // A Query should not be a valid type for a FilterQuery within an op like $and
- expectError(TestModel.find({ $and: [TestModel.find()] }));
-
- const id = new Types.ObjectId();
- // Any ObjectId should be accepted as the root argument
- TestModel.find(id);
- // A ObjectId should not be a valid type for a FilterQuery within an op like $and
- expectError(TestModel.find({ $and: [id] }));
-}
diff --git a/test/types/populate.test.ts b/test/types/populate.test.ts
index 5ac5f76e83a..9829fa82791 100644
--- a/test/types/populate.test.ts
+++ b/test/types/populate.test.ts
@@ -248,7 +248,6 @@ async function _11532() {
if (!leanResult) return;
expectType(leanResult.child.name);
- expectError(leanResult?.__v);
}
async function gh11710() {
diff --git a/test/types/queries.test.ts b/test/types/queries.test.ts
index 5396f384cad..35ff6f24d6e 100644
--- a/test/types/queries.test.ts
+++ b/test/types/queries.test.ts
@@ -334,7 +334,7 @@ function gh14397() {
age: number;
};
- const id: string = 'Test Id';
+ const id = 'Test Id';
let idCondition: Condition['id']>;
let filter: FilterQuery>;
@@ -672,3 +672,9 @@ async function gh14545() {
const myProjection = await M.findOne({}).select({ prop: 1 }).exec();
expectType(myProjection);
}
+
+function gh14841() {
+ const filter: FilterQuery<{ owners: string[] }> = {
+ $expr: { $lt: [{ $size: '$owners' }, 10] }
+ };
+}
diff --git a/test/types/schema.test.ts b/test/types/schema.test.ts
index 04828bc4f17..82988a05b12 100644
--- a/test/types/schema.test.ts
+++ b/test/types/schema.test.ts
@@ -10,6 +10,7 @@ import {
InferRawDocType,
InferSchemaType,
InsertManyOptions,
+ JSONSerialized,
ObtainDocumentType,
ObtainSchemaGeneric,
ResolveSchemaOptions,
@@ -20,8 +21,10 @@ import {
Types,
Query,
model,
- ValidateOpts
+ ValidateOpts,
+ BufferToBinary
} from 'mongoose';
+import { Binary } from 'mongodb';
import { IsPathRequired } from '../../types/inferschematype';
import { expectType, expectError, expectAssignable } from 'tsd';
import { ObtainDocumentPathType, ResolvePathType } from '../../types/inferschematype';
@@ -917,7 +920,7 @@ async function gh12593() {
expectType(doc2.x);
const doc3 = await Test.findOne({}).orFail().lean();
- expectType(doc3.x);
+ expectType(doc3.x);
const arrSchema = new Schema({ arr: [{ type: Schema.Types.UUID }] });
@@ -1634,3 +1637,90 @@ function gh14825() {
type SchemaType = InferSchemaType;
expectAssignable({} as SchemaType);
}
+
+function gh8389() {
+ const schema = new Schema({ name: String, tags: [String] });
+
+ expectAssignable<SchemaType | undefined>(schema.path('name').getEmbeddedSchemaType());
+ expectAssignable<SchemaType | undefined>(schema.path('tags').getEmbeddedSchemaType());
+}
+
+function gh14879() {
+ Schema.Types.String.setters.push((val?: unknown) => typeof val === 'string' ? val.trim() : val);
+}
+
+async function gh14950() {
+ const SightingSchema = new Schema(
+ {
+ _id: { type: Schema.Types.ObjectId, required: true },
+ location: {
+ type: { type: String, required: true },
+ coordinates: [{ type: Number }]
+ }
+ }
+ );
+
+ const TestModel = model('Test', SightingSchema);
+ const doc = await TestModel.findOne().orFail();
+
+ expectType<string>(doc.location!.type);
+ expectType<number[]>(doc.location!.coordinates);
+}
+
+async function gh14902() {
+ const exampleSchema = new Schema({
+ image: { type: Buffer },
+ subdoc: {
+ type: new Schema({
+ testBuf: Buffer
+ })
+ }
+ });
+ const Test = model('Test', exampleSchema);
+
+ const doc = await Test.findOne().lean().orFail();
+ expectType(doc.image);
+ expectType(doc.subdoc!.testBuf);
+}
+
+async function gh14451() {
+ const exampleSchema = new Schema({
+ myId: { type: 'ObjectId' },
+ myRequiredId: { type: 'ObjectId', required: true },
+ myBuf: { type: Buffer, required: true },
+ subdoc: {
+ type: new Schema({
+ subdocProp: Date
+ })
+ },
+ docArr: [{ nums: [Number], times: [{ type: Date }] }],
+ myMap: {
+ type: Map,
+ of: String
+ }
+ });
+
+ const Test = model('Test', exampleSchema);
+
+ type TestJSON = JSONSerialized>;
+ expectType<{
+ myId?: string | undefined | null,
+ myRequiredId: string,
+ myBuf: { type: 'buffer', data: number[] },
+ subdoc?: {
+ subdocProp?: string | undefined | null
+ } | null,
+ docArr: { nums: number[], times: string[] }[],
+ myMap?: Record<string, string> | null | undefined
+ }>({} as TestJSON);
+}
+
+async function gh12959() {
+ const schema = new Schema({ name: String });
+ const TestModel = model('Test', schema);
+
+ const doc = await TestModel.findOne().orFail();
+ expectType<number>(doc.__v);
+ const leanDoc = await TestModel.findOne().lean().orFail();
+ expectType<number>(leanDoc.__v);
+}
diff --git a/types/connection.d.ts b/types/connection.d.ts
index 08165549a86..e37914bdb4f 100644
--- a/types/connection.d.ts
+++ b/types/connection.d.ts
@@ -50,6 +50,12 @@ declare module 'mongoose' {
autoIndex?: boolean;
/** Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
autoCreate?: boolean;
+ /**
+ * Sanitizes query filters against [query selector injection attacks](
+ * https://thecodebarbarian.com/2014/09/04/defending-against-query-selector-injection-attacks.html
+ * ) by wrapping any nested objects that have a property whose name starts with $ in a $eq.
+ */
+ sanitizeFilter?: boolean;
}
class Connection extends events.EventEmitter implements SessionStarter {
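A sketch of the new connection-level option; the assumption here is that it sets the default for every query on the connection, the same way the per-query `sanitizeFilter` option behaves:

    const conn = mongoose.createConnection('mongodb://127.0.0.1:27017/test', {
      sanitizeFilter: true
    });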
diff --git a/types/cursor.d.ts b/types/cursor.d.ts
index 888dd04c79b..8c76599e584 100644
--- a/types/cursor.d.ts
+++ b/types/cursor.d.ts
@@ -8,10 +8,13 @@ declare module 'mongoose' {
parallel?: number;
batchSize?: number;
continueOnError?: boolean;
+ signal?: AbortSignal;
}
- class Cursor extends stream.Readable {
- [Symbol.asyncIterator](): AsyncIterableIterator;
+ class Cursor extends stream.Readable {
+ [Symbol.asyncIterator](): Cursor, Options, IteratorResult>;
+
+ [Symbol.asyncDispose](): Promise<void>;
/**
* Adds a [cursor flag](https://mongodb.github.io/node-mongodb-native/4.9/classes/FindCursor.html#addCursorFlag).
@@ -25,6 +28,12 @@ declare module 'mongoose' {
*/
close(): Promise;
+ /**
+ * Destroys this cursor, closing the underlying driver cursor. Streaming stops,
+ * and subsequent calls to `next()` will error.
+ */
+ destroy(): this;
+
/**
* Rewind this cursor to its uninitialized state. Any options that are present on the cursor will
* remain in effect. Iterating this cursor will cause new queries to be sent to the server, even
@@ -51,7 +60,7 @@ declare module 'mongoose' {
* Get the next document from this cursor. Will return `null` when there are
* no documents left.
*/
- next(): Promise;
+ next(): Promise;
options: Options;
}
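Two of the cursor additions above in sketch form: `Symbol.asyncDispose` (usable with TypeScript 5.2+ `await using`) and the `signal` option for `eachAsync()`. The assumption is that aborting the signal stops iteration; `processDoc` and the `active` field are illustrative:

    // Close the cursor automatically when the block exits, even on early return:
    async function findFirstActive() {
      await using cursor = Model.find({ active: true }).cursor();
      for await (const doc of cursor) {
        return doc;
      }
      return null;
    }

    // Or bound a long eachAsync() run with an AbortSignal:
    const controller = new AbortController();
    setTimeout(() => controller.abort(), 30_000);
    await Model.find().cursor().eachAsync(processDoc, { parallel: 4, signal: controller.signal });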
diff --git a/types/document.d.ts b/types/document.d.ts
index 5557269783f..e52169dfca4 100644
--- a/types/document.d.ts
+++ b/types/document.d.ts
@@ -24,9 +24,6 @@ declare module 'mongoose' {
/** This documents _id. */
_id: T;
- /** This documents __v. */
- __v?: any;
-
/** Assert that a given path or paths is populated. Throws an error if not populated. */
$assertPopulated(path: string | string[], values?: Partial): Omit & Paths;
@@ -138,7 +135,7 @@ declare module 'mongoose' {
* Takes a populated field and returns it to its unpopulated state. If called with
* no arguments, then all populated fields are returned to their unpopulated state.
*/
- depopulate(path?: string | string[]): this;
+ depopulate<Overrides = {}>(path?: string | string[]): MergeType<this, Overrides>;
/**
* Returns the list of paths that have been directly modified. A direct
@@ -259,10 +256,17 @@ declare module 'mongoose' {
set(value: string | Record): this;
/** The return value of this method is used in calls to JSON.stringify(doc). */
- toJSON(options?: ToObjectOptions & { flattenMaps?: true }): FlattenMaps>;
+ toJSON(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps>;
+ toJSON(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps>;
+ toJSON(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>>;
toJSON(options: ToObjectOptions & { flattenMaps: false }): Require_id;
- toJSON>(options?: ToObjectOptions & { flattenMaps?: true }): FlattenMaps;
+ toJSON(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString>;
+
+ toJSON>(options?: ToObjectOptions & { flattenMaps?: true, flattenObjectIds?: false }): FlattenMaps;
+ toJSON>(options: ToObjectOptions & { flattenObjectIds: false }): FlattenMaps;
+ toJSON>(options: ToObjectOptions & { flattenObjectIds: true }): ObjectIdToString>;
toJSON>(options: ToObjectOptions & { flattenMaps: false }): T;
+ toJSON>(options: ToObjectOptions & { flattenMaps: false; flattenObjectIds: true }): ObjectIdToString;
/** Converts this document into a plain-old JavaScript object ([POJO](https://masteringjs.io/tutorials/fundamentals/pojo)). */
toObject(options?: ToObjectOptions): Require_id;
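A sketch of what the extra `toJSON()` overloads enable at call sites; the output types come from the `ObjectIdToString` and `FlattenMaps` helpers added in types/index.d.ts later in this diff:

    const doc = await User.findOne().orFail();
    const payload = doc.toJSON({ flattenObjectIds: true });
    // payload._id (and other ObjectId paths) are typed as string, matching
    // what the serialized JSON actually contains.
    const body = JSON.stringify(payload);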
diff --git a/types/index.d.ts b/types/index.d.ts
index 02c975a4eb3..668f67e55d1 100644
--- a/types/index.d.ts
+++ b/types/index.d.ts
@@ -25,7 +25,7 @@
///
///
-declare class NativeDate extends global.Date { }
+declare class NativeDate extends globalThis.Date { }
declare module 'mongoose' {
import Kareem = require('kareem');
@@ -138,6 +138,10 @@ declare module 'mongoose' {
? IfAny>
: T & { _id: Types.ObjectId };
+ export type Default__v<T> = T extends { __v?: infer U }
+ ? T
+ : T & { __v: number };
+
/** Helper type for getting the hydrated document type from the raw document type. The hydrated document type is what `new MyModel()` returns. */
export type HydratedDocument<
DocType,
@@ -147,12 +151,12 @@ declare module 'mongoose' {
DocType,
any,
TOverrides extends Record ?
- Document & Require_id :
+ Document & Default__v> :
IfAny<
TOverrides,
- Document & Require_id,
+ Document & Default__v>,
Document & MergeType<
- Require_id,
+ Default__v>,
TOverrides
>
>
@@ -619,6 +623,9 @@ declare module 'mongoose' {
/** Additional options like `limit` and `lean`. */
options?: QueryOptions & { match?: AnyObject };
+ /** If true and the given `name` is a direct child of an array, apply the virtual to the array rather than the elements. */
+ applyToArray?: boolean;
+
/** Additional options for plugins */
[extra: string]: any;
}
@@ -702,6 +709,91 @@ declare module 'mongoose' {
[K in keyof T]: FlattenProperty;
};
+ /**
+ * Converts any Buffer properties into mongodb.Binary instances, which is what `lean()` returns
+ */
+ export type BufferToBinary = T extends TreatAsPrimitives ? T : T extends Record ? {
+ [K in keyof T]: T[K] extends Buffer
+ ? mongodb.Binary
+ : T[K] extends (Buffer | null | undefined)
+ ? mongodb.Binary | null | undefined
+ : T[K] extends Types.DocumentArray
+ ? Types.DocumentArray>
+ : T[K] extends Types.Subdocument
+ ? HydratedSingleSubdocument
+ : BufferToBinary;
+ } : T;
+
+ /**
+ * Converts any Buffer properties into { type: 'buffer', data: [1, 2, 3] } format for JSON serialization
+ */
+ export type BufferToJSON = T extends TreatAsPrimitives ? T : T extends Record ? {
+ [K in keyof T]: T[K] extends Buffer
+ ? { type: 'buffer', data: number[] }
+ : T[K] extends (Buffer | null | undefined)
+ ? { type: 'buffer', data: number[] } | null | undefined
+ : T[K] extends Types.DocumentArray
+ ? Types.DocumentArray>
+ : T[K] extends Types.Subdocument
+ ? HydratedSingleSubdocument
+ : BufferToBinary;
+ } : T;
+
+ /**
+ * Converts any ObjectId properties into strings for JSON serialization
+ */
+ export type ObjectIdToString = T extends TreatAsPrimitives ? T : T extends Record ? {
+ [K in keyof T]: T[K] extends mongodb.ObjectId
+ ? string
+ : T[K] extends (mongodb.ObjectId | null | undefined)
+ ? string | null | undefined
+ : T[K] extends Types.DocumentArray
+ ? Types.DocumentArray>
+ : T[K] extends Types.Subdocument
+ ? HydratedSingleSubdocument>
+ : ObjectIdToString;
+ } : T;
+
+ /**
+ * Converts any Date properties into strings for JSON serialization
+ */
+ export type DateToString = T extends TreatAsPrimitives ? T : T extends Record ? {
+ [K in keyof T]: T[K] extends NativeDate
+ ? string
+ : T[K] extends (NativeDate | null | undefined)
+ ? string | null | undefined
+ : T[K] extends Types.DocumentArray
+ ? Types.DocumentArray>
+ : T[K] extends Types.Subdocument
+ ? HydratedSingleSubdocument>
+ : DateToString;
+ } : T;
+
+ /**
+ * Converts any Mongoose subdocuments (single nested or doc arrays) into POJO equivalents
+ */
+ export type SubdocsToPOJOs = T extends TreatAsPrimitives ? T : T extends Record ? {
+ [K in keyof T]: T[K] extends NativeDate
+ ? string
+ : T[K] extends (NativeDate | null | undefined)
+ ? string | null | undefined
+ : T[K] extends Types.DocumentArray
+ ? ItemType[]
+ : T[K] extends Types.Subdocument
+ ? SubdocType
+ : SubdocsToPOJOs;
+ } : T;
+
+ export type JSONSerialized = SubdocsToPOJOs<
+ FlattenMaps<
+ BufferToJSON<
+ ObjectIdToString<
+ DateToString
+ >
+ >
+ >
+ >;
+
/**
* Separate type is needed for properties of union type (for example, Types.DocumentArray | undefined) to apply conditional check to each member of it
* https://www.typescriptlang.org/docs/handbook/2/conditional-types.html#distributive-conditional-types
@@ -712,7 +804,7 @@ declare module 'mongoose' {
? Types.DocumentArray> : FlattenMaps;
export type actualPrimitives = string | boolean | number | bigint | symbol | null | undefined;
- export type TreatAsPrimitives = actualPrimitives | NativeDate | RegExp | symbol | Error | BigInt | Types.ObjectId | Buffer | Function;
+ export type TreatAsPrimitives = actualPrimitives | NativeDate | RegExp | symbol | Error | BigInt | Types.ObjectId | Buffer | Function | mongodb.Binary;
export type SchemaDefinitionType = T extends Document ? Omit> : T;
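The `JSONSerialized` helper composes the other new mapped types above. A sketch of using it to type an HTTP layer, assuming it is applied to the schema's inferred raw document type:

    const userSchema = new Schema({ name: String, createdAt: Date, avatar: Buffer });

    // Dates and ObjectIds become strings, Buffers become { type: 'buffer', data: number[] },
    // Maps become plain objects.
    type UserJSON = JSONSerialized<InferSchemaType<typeof userSchema>>;

    function renderUser(user: UserJSON): string {
      return JSON.stringify(user);
    }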
diff --git a/types/indexes.d.ts b/types/indexes.d.ts
index f0891f081eb..805705905a2 100644
--- a/types/indexes.d.ts
+++ b/types/indexes.d.ts
@@ -57,7 +57,8 @@ declare module 'mongoose' {
type IndexDefinition = Record;
interface SyncIndexesOptions extends mongodb.CreateIndexesOptions {
- continueOnError?: boolean
+ continueOnError?: boolean;
+ hideIndexes?: boolean;
}
type ConnectionSyncIndexesResult = Record;
type OneCollectionSyncIndexesResult = Array & mongodb.MongoServerError;
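A sketch of the new `hideIndexes` flag; the assumption (not stated in this diff) is that `syncIndexes()` hides out-of-sync indexes rather than dropping them outright, so they can be unhidden if a query regresses:

    await Model.syncIndexes({ hideIndexes: true });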
diff --git a/types/inferrawdoctype.d.ts b/types/inferrawdoctype.d.ts
index 5ef52e13251..605571057a0 100644
--- a/types/inferrawdoctype.d.ts
+++ b/types/inferrawdoctype.d.ts
@@ -91,8 +91,8 @@ declare module 'mongoose' {
IfEquals extends true ? PathEnumOrString :
PathValueType extends NumberSchemaDefinition ? Options['enum'] extends ReadonlyArray ? Options['enum'][number] : number :
IfEquals extends true ? number :
- PathValueType extends DateSchemaDefinition ? Date :
- IfEquals extends true ? Date :
+ PathValueType extends DateSchemaDefinition ? NativeDate :
+ IfEquals extends true ? NativeDate :
PathValueType extends typeof Buffer | 'buffer' | 'Buffer' | typeof Schema.Types.Buffer ? Buffer :
PathValueType extends BooleanSchemaDefinition ? boolean :
IfEquals extends true ? boolean :
diff --git a/types/inferschematype.d.ts b/types/inferschematype.d.ts
index dbcb3ad851e..d73ad4cb81c 100644
--- a/types/inferschematype.d.ts
+++ b/types/inferschematype.d.ts
@@ -146,9 +146,10 @@ type RequiredPathKeys = {
* @param {TypeKey} TypeKey A generic of literal string type."Refers to the property used for path type definition".
* @returns a record contains required paths with the corresponding type.
*/
-type RequiredPaths = {
- [K in RequiredPathKeys]: T[K];
-};
+type RequiredPaths = Pick<
+ { -readonly [K in keyof T]: T[K] },
+ RequiredPathKeys
+>;
/**
* @summary A Utility to obtain schema's optional path keys.
@@ -166,9 +167,10 @@ type OptionalPathKeys = {
* @param {TypeKey} TypeKey A generic of literal string type."Refers to the property used for path type definition".
* @returns a record contains optional paths with the corresponding type.
*/
-type OptionalPaths = {
- [K in OptionalPathKeys]?: T[K];
-};
+type OptionalPaths = Pick<
+ { -readonly [K in keyof T]?: T[K] },
+ OptionalPathKeys
+>;
/**
* @summary Allows users to optionally choose their own type for a schema field for stronger typing.
@@ -183,8 +185,16 @@ type TypeHint = T extends { __typehint: infer U } ? U: never;
* @param {TypeKey} TypeKey A generic refers to document definition.
*/
type ObtainDocumentPathType = ResolvePathType<
-PathValueType extends PathWithTypePropertyBaseType ? PathValueType[TypeKey] : PathValueType,
-PathValueType extends PathWithTypePropertyBaseType ? Omit : {},
+ PathValueType extends PathWithTypePropertyBaseType
+ ? PathValueType[TypeKey] extends PathWithTypePropertyBaseType
+ ? PathValueType
+ : PathValueType[TypeKey]
+ : PathValueType,
+ PathValueType extends PathWithTypePropertyBaseType
+ ? PathValueType[TypeKey] extends PathWithTypePropertyBaseType
+ ? {}
+ : Omit
+ : {},
TypeKey,
TypeHint
>;
@@ -281,8 +291,8 @@ type ResolvePathType extends true ? PathEnumOrString :
PathValueType extends NumberSchemaDefinition ? Options['enum'] extends ReadonlyArray ? Options['enum'][number] : number :
IfEquals extends true ? number :
- PathValueType extends DateSchemaDefinition ? Date :
- IfEquals extends true ? Date :
+ PathValueType extends DateSchemaDefinition ? NativeDate :
+ IfEquals extends true ? NativeDate :
PathValueType extends typeof Buffer | 'buffer' | 'Buffer' | typeof Schema.Types.Buffer ? Buffer :
PathValueType extends BooleanSchemaDefinition ? boolean :
IfEquals extends true ? boolean :
diff --git a/types/models.d.ts b/types/models.d.ts
index 4c2403fd51b..5a5ced60944 100644
--- a/types/models.d.ts
+++ b/types/models.d.ts
@@ -64,14 +64,13 @@ declare module 'mongoose' {
throwOnValidationError?: boolean;
}
- type InsertManyResult = mongodb.InsertManyResult & {
- insertedIds: {
- [key: number]: InferId;
- };
+ interface InsertManyResult extends mongodb.InsertManyResult {
mongoose?: { validationErrors?: Array };
- };
+ }
type UpdateWriteOpResult = mongodb.UpdateResult;
+ type UpdateResult = mongodb.UpdateResult;
+ type DeleteResult = mongodb.DeleteResult;
interface MapReduceOptions {
map: Function | string;
@@ -127,7 +126,7 @@ declare module 'mongoose' {
}
interface ModifyResult {
- value: Require_id<T> | null;
+ value: Default__v<Require_id<T>> | null;
/** see https://www.mongodb.com/docs/manual/reference/command/findAndModify/#lasterrorobject */
lastErrorObject?: {
updatedExisting?: boolean;
@@ -290,6 +289,14 @@ declare module 'mongoose' {
applyDefaults(obj: AnyObject): AnyObject;
applyDefaults(obj: TRawDocType): TRawDocType;
+ /** Apply virtuals to the given POJO. */
+ applyVirtuals(obj: AnyObject, virtualsToApply?: string[]): AnyObject;
+
+ /**
+ * Apply this model's timestamps to a given POJO, including subdocument timestamps
+ */
+ applyTimestamps(obj: AnyObject, options?: { isUpdate?: boolean, currentTime?: () => Date }): AnyObject;
+
/**
* Sends multiple `insertOne`, `updateOne`, `updateMany`, `replaceOne`,
* `deleteOne`, and/or `deleteMany` operations to the MongoDB server in one
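A sketch tying the new statics and the exported `DeleteResult` together, for example when preparing raw objects for a driver-level insert; the model and data are illustrative:

    import { DeleteResult } from 'mongoose';

    const rows = [{ name: 'a' }, { name: 'b' }];
    for (const row of rows) {
      User.applyDefaults(row);
      User.applyTimestamps(row); // adds createdAt/updatedAt per the schema's timestamps config
    }
    await User.collection.insertMany(rows);

    const result: DeleteResult = await User.deleteOne({ name: 'a' });
    console.log(result.deletedCount);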
diff --git a/types/query.d.ts b/types/query.d.ts
index 572ec33e1df..b2d4da347b7 100644
--- a/types/query.d.ts
+++ b/types/query.d.ts
@@ -12,7 +12,7 @@ declare module 'mongoose' {
*/
type RootFilterQuery = FilterQuery | Query | Types.ObjectId;
- type FilterQuery ={
+ type FilterQuery = {
[P in keyof T]?: Condition;
} & RootQuerySelector & { _id?: Condition; };
@@ -20,10 +20,12 @@ declare module 'mongoose' {
| 'context'
| 'multipleCastError'
| 'overwriteDiscriminatorKey'
+ | 'overwriteImmutable'
| 'populate'
| 'runValidators'
| 'sanitizeProjection'
| 'sanitizeFilter'
+ | 'schemaLevelProjections'
| 'setDefaultsOnInsert'
| 'strict'
| 'strictQuery'
@@ -116,10 +118,9 @@ declare module 'mongoose' {
$where?: string | Function;
/** @see https://www.mongodb.com/docs/manual/reference/operator/query/comment/#op._S_comment */
$comment?: string;
- // we could not find a proper TypeScript generic to support nested queries e.g. 'user.friends.name'
- // this will mark all unrecognized properties as any (including nested queries) only if
- // they include a "." (to avoid generically allowing any unexpected keys)
- [nestedSelector: `${string}.${string}`]: any;
+ $expr?: Record;
+ // this will mark all unrecognized properties as any (including nested queries)
+ [key: string]: any;
};
interface QueryTimestampsConfig {
@@ -154,6 +155,11 @@ declare module 'mongoose' {
new?: boolean;
overwriteDiscriminatorKey?: boolean;
+ /**
+ * Mongoose removes updated immutable properties from `update` by default (excluding $setOnInsert).
+ * Set `overwriteImmutable` to `true` to allow updating immutable properties using other update operators.
+ */
+ overwriteImmutable?: boolean;
projection?: ProjectionType;
/**
* if true, returns the full ModifyResult rather than just the document
@@ -180,6 +186,11 @@ declare module 'mongoose' {
* aren't explicitly allowed using `mongoose.trusted()`.
*/
sanitizeFilter?: boolean;
+ /**
+ * Enable or disable schema level projections for this query. Enabled by default.
+ * Set to `false` to include fields with `select: false` in the query result by default.
+ */
+ schemaLevelProjections?: boolean;
setDefaultsOnInsert?: boolean;
skip?: number;
sort?: any;
@@ -212,7 +223,7 @@ declare module 'mongoose' {
type QueryOpThatReturnsDocument = 'find' | 'findOne' | 'findOneAndUpdate' | 'findOneAndReplace' | 'findOneAndDelete';
type GetLeanResultType = QueryOp extends QueryOpThatReturnsDocument
- ? (ResultType extends any[] ? Require_id>[] : Require_id>)
+ ? (ResultType extends any[] ? Default__v>>>[] : Default__v>>>)
: ResultType;
type MergePopulatePaths> = QueryOp extends QueryOpThatReturnsDocument
@@ -735,6 +746,12 @@ declare module 'mongoose' {
*/
sanitizeProjection(value: boolean): this;
+ /**
+ * Enable or disable schema level projections for this query. Enabled by default.
+ * Set to `false` to include fields with `select: false` in the query result by default.
+ */
+ schemaLevelProjections(value: boolean): this;
+
/** Specifies which document fields to include or exclude (also known as the query "projection") */
select(
arg: string | string[] | Record
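A sketch of `schemaLevelProjections(false)` in application code, mirroring the gh-11474 tests earlier in this diff: it only disables the schema's default `select` behavior, while explicit `.select()` calls still apply:

    // Fetch a select: false field for an auth check without changing the schema:
    const user = await User.findOne({ email: 'test@example.com' })
      .schemaLevelProjections(false)
      .orFail();
    // user.passwordHash is present even though the schema declares select: false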
diff --git a/types/schematypes.d.ts b/types/schematypes.d.ts
index e8a0ecffdf0..aff686e1ec9 100644
--- a/types/schematypes.d.ts
+++ b/types/schematypes.d.ts
@@ -216,6 +216,9 @@ declare module 'mongoose' {
/** Attaches a getter for all instances of this schema type. */
static get(getter: (value: any) => any): void;
+ /** Array containing default setters for all instances of this SchemaType */
+ static setters: ((val?: unknown, priorVal?: unknown, doc?: Document, options?: Record | null) => unknown)[];
+
/** The class that Mongoose uses internally to instantiate this SchemaType's `options` property. */
OptionsConstructor: SchemaTypeOptions;
@@ -229,6 +232,9 @@ declare module 'mongoose' {
/** Adds a getter to this schematype. */
get(fn: Function): this;
+ /** Gets this SchemaType's embedded SchemaType, if any */
+ getEmbeddedSchemaType(): SchemaType | undefined;
+
/**
* Defines this path as immutable. Mongoose prevents you from changing
* immutable paths unless the parent document has [`isNew: true`](/docs/api/document.html#document_Document-isNew).