diff --git a/.evergreen/config.in.yml b/.evergreen/config.in.yml index da9d6158081..eab1c6fbdea 100644 --- a/.evergreen/config.in.yml +++ b/.evergreen/config.in.yml @@ -123,58 +123,6 @@ functions: env: DRIVERS_TOOLS: ${DRIVERS_TOOLS} - "bootstrap oidc": - - command: ec2.assume_role - params: - role_arn: ${OIDC_AWS_ROLE_ARN} - - command: shell.exec - type: test - params: - working_dir: "src" - shell: bash - script: | - ${PREPARE_SHELL} - cd "${DRIVERS_TOOLS}"/.evergreen/auth_oidc - - # This is a bit confusing but the ec2.assume_role command before - # this task will overwrite these variables to a different value - # than we have set in our evergreen project config. As these are - # now specific to the OIDC ARN, we re-export for the python - # scripts. - export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN} - export OIDC_TOKEN_DIR=/tmp/tokens - - . ./activate-authoidcvenv.sh - python oidc_write_orchestration.py - python oidc_get_tokens.py - - "setup oidc roles": - - command: subprocess.exec - params: - working_dir: src - binary: bash - args: - - .evergreen/setup-oidc-roles.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - "run oidc tests aws": - - command: shell.exec - type: test - params: - working_dir: "src" - timeout_secs: 300 - shell: bash - script: | - ${PREPARE_SHELL} - - OIDC_TOKEN_DIR="/tmp/tokens" \ - AWS_WEB_IDENTITY_TOKEN_FILE="/tmp/tokens/test_user1" \ - PROJECT_DIRECTORY="${PROJECT_DIRECTORY}" \ - bash ${PROJECT_DIRECTORY}/.evergreen/run-oidc-tests.sh - "run tests": - command: shell.exec type: test @@ -1260,23 +1208,75 @@ tasks: - name: "oidc-auth-test-azure-latest" commands: - - command: expansions.update - type: setup - params: - updates: - - { key: NPM_VERSION, value: "9" } - func: "install dependencies" - command: subprocess.exec + type: test params: working_dir: src binary: bash env: DRIVERS_TOOLS: ${DRIVERS_TOOLS} PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} - 
AZUREOIDC_CLIENTID: ${testazureoidc_clientid} - PROVIDER_NAME: azure + ENVIRONMENT: azure + SCRIPT: run-oidc-prose-tests.sh args: - .evergreen/run-oidc-tests-azure.sh + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: azure + SCRIPT: run-oidc-unified-tests.sh + args: + - .evergreen/run-oidc-tests-azure.sh + + - name: "oidc-auth-test-test-latest" + commands: + - func: "install dependencies" + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: test + SCRIPT: run-oidc-prose-tests.sh + args: + - .evergreen/run-oidc-tests-test.sh + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: test + SCRIPT: run-oidc-unified-tests.sh + args: + - .evergreen/run-oidc-tests-test.sh + + - name: "oidc-auth-test-gcp-latest" + commands: + - func: "install dependencies" + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: gcp + SCRIPT: run-oidc-prose-tests.sh + args: + - .evergreen/run-oidc-tests-gcp.sh - name: "test-aws-lambda-deployed" commands: @@ -1428,6 +1428,25 @@ task_groups: tasks: - test-azurekms-task + - name: testtestoidc_task_group + setup_group: + - func: fetch source + - command: ec2.assume_role + params: + role_arn: ${OIDC_AWS_ROLE_ARN} + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + env: + MONGODB_VERSION: "8.0" + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + tasks: + - 
oidc-auth-test-test-latest + - name: testazureoidc_task_group setup_group: - func: fetch source @@ -1437,25 +1456,43 @@ task_groups: script: |- set -o errexit ${PREPARE_SHELL} - export AZUREOIDC_CLIENTID="${testazureoidc_clientid}" - export AZUREOIDC_TENANTID="${testazureoic_tenantid}" - export AZUREOIDC_SECRET="${testazureoidc_secret}" - export AZUREOIDC_KEYVAULT=${testazureoidc_keyvault} - export AZUREOIDC_DRIVERS_TOOLS="$DRIVERS_TOOLS" export AZUREOIDC_VMNAME_PREFIX="NODE_DRIVER" - $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/create-and-setup-vm.sh - teardown_group: + $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/setup.sh + teardown_task: - command: shell.exec params: shell: bash script: |- ${PREPARE_SHELL} - $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/delete-vm.sh + $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/teardown.sh setup_group_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - oidc-auth-test-azure-latest + - name: testgcpoidc_task_group + setup_group: + - func: fetch source + - command: shell.exec + params: + shell: bash + script: |- + set -o errexit + ${PREPARE_SHELL} + export GCPOIDC_VMNAME_PREFIX="NODE_DRIVER" + $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/setup.sh + teardown_task: + - command: shell.exec + params: + shell: bash + script: |- + ${PREPARE_SHELL} + $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/teardown.sh + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + tasks: + - oidc-auth-test-gcp-latest + - name: test_atlas_task_group setup_group: - func: fetch source @@ -1471,7 +1508,7 @@ task_groups: - command: expansions.update params: file: src/atlas-expansion.yml - teardown_group: + teardown_task: - command: subprocess.exec params: working_dir: src @@ -1499,7 +1536,7 @@ task_groups: - command: expansions.update params: file: src/atlas-expansion.yml - teardown_group: + teardown_task: - command: subprocess.exec params: working_dir: src diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 7980b5fd326..e60beb8d5b1 100644 --- 
a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -96,55 +96,6 @@ functions: - .evergreen/run-azure-kms-mock-server.sh env: DRIVERS_TOOLS: ${DRIVERS_TOOLS} - bootstrap oidc: - - command: ec2.assume_role - params: - role_arn: ${OIDC_AWS_ROLE_ARN} - - command: shell.exec - type: test - params: - working_dir: src - shell: bash - script: | - ${PREPARE_SHELL} - cd "${DRIVERS_TOOLS}"/.evergreen/auth_oidc - - # This is a bit confusing but the ec2.assume_role command before - # this task will overwrite these variables to a different value - # than we have set in our evergreen project config. As these are - # now specific to the OIDC ARN, we re-export for the python - # scripts. - export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - export AWS_SESSION_TOKEN=${AWS_SESSION_TOKEN} - export OIDC_TOKEN_DIR=/tmp/tokens - - . ./activate-authoidcvenv.sh - python oidc_write_orchestration.py - python oidc_get_tokens.py - setup oidc roles: - - command: subprocess.exec - params: - working_dir: src - binary: bash - args: - - .evergreen/setup-oidc-roles.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - run oidc tests aws: - - command: shell.exec - type: test - params: - working_dir: src - timeout_secs: 300 - shell: bash - script: | - ${PREPARE_SHELL} - - OIDC_TOKEN_DIR="/tmp/tokens" \ - AWS_WEB_IDENTITY_TOKEN_FILE="/tmp/tokens/test_user1" \ - PROJECT_DIRECTORY="${PROJECT_DIRECTORY}" \ - bash ${PROJECT_DIRECTORY}/.evergreen/run-oidc-tests.sh run tests: - command: shell.exec type: test @@ -1211,23 +1162,73 @@ tasks: - src/.evergreen/run-azure-kms-tests.sh - name: oidc-auth-test-azure-latest commands: - - command: expansions.update - type: setup - params: - updates: - - {key: NPM_VERSION, value: '9'} - func: install dependencies - command: subprocess.exec + type: test params: working_dir: src binary: bash env: DRIVERS_TOOLS: ${DRIVERS_TOOLS} PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} - AZUREOIDC_CLIENTID: ${testazureoidc_clientid} - 
PROVIDER_NAME: azure + ENVIRONMENT: azure + SCRIPT: run-oidc-prose-tests.sh args: - .evergreen/run-oidc-tests-azure.sh + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: azure + SCRIPT: run-oidc-unified-tests.sh + args: + - .evergreen/run-oidc-tests-azure.sh + - name: oidc-auth-test-test-latest + commands: + - func: install dependencies + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: test + SCRIPT: run-oidc-prose-tests.sh + args: + - .evergreen/run-oidc-tests-test.sh + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: test + SCRIPT: run-oidc-unified-tests.sh + args: + - .evergreen/run-oidc-tests-test.sh + - name: oidc-auth-test-gcp-latest + commands: + - func: install dependencies + - command: subprocess.exec + type: test + params: + working_dir: src + binary: bash + env: + DRIVERS_TOOLS: ${DRIVERS_TOOLS} + PROJECT_DIRECTORY: ${PROJECT_DIRECTORY} + ENVIRONMENT: gcp + SCRIPT: run-oidc-prose-tests.sh + args: + - .evergreen/run-oidc-tests-gcp.sh - name: test-aws-lambda-deployed commands: - command: expansions.update @@ -1946,25 +1947,6 @@ tasks: commands: - func: install dependencies - func: run ldap tests - - name: test-auth-oidc - tags: - - latest - - replica_set - - oidc - commands: - - command: expansions.update - type: setup - params: - updates: - - {key: VERSION, value: latest} - - {key: TOPOLOGY, value: replica_set} - - {key: AUTH, value: auth} - - {key: ORCHESTRATION_FILE, value: auth-oidc.json} - - func: install dependencies - - func: bootstrap oidc - - func: bootstrap mongo-orchestration - - func: setup oidc roles - - func: run oidc tests aws - name: test-socks5 
tags: [] commands: @@ -4467,6 +4449,27 @@ task_groups: - ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/teardown.sh tasks: - test-azurekms-task + - name: testtestoidc_task_group + setup_group: + - func: fetch source + - command: ec2.assume_role + params: + role_arn: ${OIDC_AWS_ROLE_ARN} + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN + env: + MONGODB_VERSION: '8.0' + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + tasks: + - oidc-auth-test-test-latest - name: testazureoidc_task_group setup_group: - func: fetch source @@ -4476,24 +4479,41 @@ task_groups: script: |- set -o errexit ${PREPARE_SHELL} - export AZUREOIDC_CLIENTID="${testazureoidc_clientid}" - export AZUREOIDC_TENANTID="${testazureoic_tenantid}" - export AZUREOIDC_SECRET="${testazureoidc_secret}" - export AZUREOIDC_KEYVAULT=${testazureoidc_keyvault} - export AZUREOIDC_DRIVERS_TOOLS="$DRIVERS_TOOLS" export AZUREOIDC_VMNAME_PREFIX="NODE_DRIVER" - $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/create-and-setup-vm.sh - teardown_group: + $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/setup.sh + teardown_task: - command: shell.exec params: shell: bash script: |- ${PREPARE_SHELL} - $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/delete-vm.sh + $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/teardown.sh setup_group_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - oidc-auth-test-azure-latest + - name: testgcpoidc_task_group + setup_group: + - func: fetch source + - command: shell.exec + params: + shell: bash + script: |- + set -o errexit + ${PREPARE_SHELL} + export GCPOIDC_VMNAME_PREFIX="NODE_DRIVER" + $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/setup.sh + teardown_task: + - command: shell.exec + params: + shell: bash + script: |- + ${PREPARE_SHELL} + $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/teardown.sh + setup_group_can_fail_task: true + 
setup_group_timeout_secs: 1800 + tasks: + - oidc-auth-test-gcp-latest - name: test_atlas_task_group setup_group: - func: fetch source @@ -4509,7 +4529,7 @@ task_groups: - command: expansions.update params: file: src/atlas-expansion.yml - teardown_group: + teardown_task: - command: subprocess.exec params: working_dir: src @@ -4536,7 +4556,7 @@ task_groups: - command: expansions.update params: file: src/atlas-expansion.yml - teardown_group: + teardown_task: - command: subprocess.exec params: working_dir: src @@ -4615,7 +4635,6 @@ buildvariants: - test-latest-load-balanced - test-auth-kerberos - test-auth-ldap - - test-auth-oidc - test-socks5 - test-socks5-csfle - test-socks5-tls @@ -4675,7 +4694,6 @@ buildvariants: - test-latest-load-balanced - test-auth-kerberos - test-auth-ldap - - test-auth-oidc - test-socks5 - test-socks5-csfle - test-socks5-tls @@ -4735,7 +4753,6 @@ buildvariants: - test-latest-load-balanced - test-auth-kerberos - test-auth-ldap - - test-auth-oidc - test-socks5 - test-socks5-csfle - test-socks5-tls @@ -4794,7 +4811,6 @@ buildvariants: - test-latest-load-balanced - test-auth-kerberos - test-auth-ldap - - test-auth-oidc - test-socks5 - test-socks5-csfle - test-socks5-tls @@ -5138,6 +5154,16 @@ buildvariants: tasks: - test_azurekms_task_group - test-azurekms-fail-task + - name: ubuntu20-test-all-oidc + display_name: MONGODB-OIDC Auth Tests + run_on: ubuntu2004-small + expansions: + NODE_LTS_VERSION: 20 + batchtime: 20160 + tasks: + - testtestoidc_task_group + - testazureoidc_task_group + - testgcpoidc_task_group - name: rhel8-test-atlas display_name: Atlas Cluster Tests run_on: rhel80-large diff --git a/.evergreen/generate_evergreen_tasks.js b/.evergreen/generate_evergreen_tasks.js index 406b926cab5..31dfbc84de7 100644 --- a/.evergreen/generate_evergreen_tasks.js +++ b/.evergreen/generate_evergreen_tasks.js @@ -165,23 +165,6 @@ TASKS.push( tags: ['auth', 'ldap'], commands: [{ func: 'install dependencies' }, { func: 'run ldap tests' }] }, - { - 
name: 'test-auth-oidc', - tags: ['latest', 'replica_set', 'oidc'], - commands: [ - updateExpansions({ - VERSION: 'latest', - TOPOLOGY: 'replica_set', - AUTH: 'auth', - ORCHESTRATION_FILE: 'auth-oidc.json' - }), - { func: 'install dependencies' }, - { func: 'bootstrap oidc' }, - { func: 'bootstrap mongo-orchestration' }, - { func: 'setup oidc roles' }, - { func: 'run oidc tests aws' } - ] - }, { name: 'test-socks5', tags: [], @@ -705,16 +688,20 @@ BUILD_VARIANTS.push({ tasks: ['test_azurekms_task_group', 'test-azurekms-fail-task'] }); -// TODO(DRIVERS-2416/NODE-4929) - Azure credentials are expired, a new drivers ticket -// should be created but at the moment for our test failures we will reference the -// open DRIVERS ticket and completed NODE ticket. -// BUILD_VARIANTS.push({ -// name: 'ubuntu20-test-azure-oidc', -// display_name: 'Azure OIDC', -// run_on: UBUNTU_20_OS, -// batchtime: 20160, -// tasks: ['testazureoidc_task_group'] -// }); +BUILD_VARIANTS.push({ + name: 'ubuntu20-test-all-oidc', + display_name: 'MONGODB-OIDC Auth Tests', + run_on: UBUNTU_20_OS, + expansions: { + NODE_LTS_VERSION: LATEST_LTS + }, + batchtime: 20160, + tasks: [ + 'testtestoidc_task_group', + 'testazureoidc_task_group', + 'testgcpoidc_task_group' + ] +}); BUILD_VARIANTS.push({ name: 'rhel8-test-atlas', diff --git a/.evergreen/run-oidc-prose-tests.sh b/.evergreen/run-oidc-prose-tests.sh new file mode 100755 index 00000000000..ae9de15d361 --- /dev/null +++ b/.evergreen/run-oidc-prose-tests.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -o errexit # Exit the script with error if any of the commands fail +set -o xtrace # Write all commands first to stderr + +ENVIRONMENT=${ENVIRONMENT:-"test"} +PROJECT_DIRECTORY=${PROJECT_DIRECTORY:-"."} +source "${PROJECT_DIRECTORY}/.evergreen/init-node-and-npm-env.sh" + +if [ -z "${MONGODB_URI_SINGLE}" ]; then + echo "Must specify MONGODB_URI_SINGLE" + exit 1 +fi + +if [ "$ENVIRONMENT" = "azure" ]; then + npm run check:oidc-azure +elif [ "$ENVIRONMENT" = "gcp" ]; 
then + npm run check:oidc-gcp +else + if [ -z "${OIDC_TOKEN_FILE}" ]; then + echo "Must specify OIDC_TOKEN_FILE" + exit 1 + fi + npm run check:oidc-test +fi diff --git a/.evergreen/run-oidc-tests-azure.sh b/.evergreen/run-oidc-tests-azure.sh index 6e65bff3f44..4fa7c5bd55d 100644 --- a/.evergreen/run-oidc-tests-azure.sh +++ b/.evergreen/run-oidc-tests-azure.sh @@ -4,8 +4,7 @@ set -o errexit # Exit the script with error if any of the commands fail export AZUREOIDC_DRIVERS_TAR_FILE=/tmp/node-mongodb-native.tgz tar czf $AZUREOIDC_DRIVERS_TAR_FILE . -export AZUREOIDC_TEST_CMD="source ./env.sh && PROVIDER_NAME=azure ./.evergreen/run-oidc-tests.sh" -export AZUREOIDC_CLIENTID=$AZUREOIDC_CLIENTID +export AZUREOIDC_TEST_CMD="source ./env.sh && ENVIRONMENT=azure ./.evergreen/${SCRIPT}" export PROJECT_DIRECTORY=$PROJECT_DIRECTORY -export PROVIDER_NAME=$PROVIDER_NAME +export ENVIRONMENT=$ENVIRONMENT bash $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/run-driver-test.sh \ No newline at end of file diff --git a/.evergreen/run-oidc-tests-gcp.sh b/.evergreen/run-oidc-tests-gcp.sh new file mode 100644 index 00000000000..f2fc1de2dc1 --- /dev/null +++ b/.evergreen/run-oidc-tests-gcp.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +export GCPOIDC_DRIVERS_TAR_FILE=/tmp/node-mongodb-native.tgz +tar czf $GCPOIDC_DRIVERS_TAR_FILE . 
+export GCPOIDC_TEST_CMD="source ./secrets-export.sh drivers/gcpoidc && ENVIRONMENT=gcp ./.evergreen/${SCRIPT}" +export PROJECT_DIRECTORY=$PROJECT_DIRECTORY +export ENVIRONMENT=$ENVIRONMENT +bash $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/run-driver-test.sh \ No newline at end of file diff --git a/.evergreen/run-oidc-tests-test.sh b/.evergreen/run-oidc-tests-test.sh new file mode 100644 index 00000000000..59389bc0ca8 --- /dev/null +++ b/.evergreen/run-oidc-tests-test.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -o xtrace # Write all commands first to stderr +set -o errexit # Exit the script with error if any of the commands fail + +source $DRIVERS_TOOLS/.evergreen/auth_oidc/secrets-export.sh +export PROJECT_DIRECTORY=$PROJECT_DIRECTORY +export ENVIRONMENT=$ENVIRONMENT +printenv +export AWS_WEB_IDENTITY_TOKEN_FILE=$OIDC_TOKEN_FILE +ls -la $OIDC_TOKEN_DIR +bash ./.evergreen/${SCRIPT} \ No newline at end of file diff --git a/.evergreen/run-oidc-tests.sh b/.evergreen/run-oidc-tests.sh deleted file mode 100755 index 98881a0c2d2..00000000000 --- a/.evergreen/run-oidc-tests.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -o xtrace # Write all commands first to stderr - -PROVIDER_NAME=${PROVIDER_NAME:-"aws"} -PROJECT_DIRECTORY=${PROJECT_DIRECTORY:-"."} -source "${PROJECT_DIRECTORY}/.evergreen/init-node-and-npm-env.sh" - -MONGODB_URI=${MONGODB_URI:-"mongodb://127.0.0.1:27017"} - -export OIDC_TOKEN_DIR=${OIDC_TOKEN_DIR} - -export MONGODB_URI=${MONGODB_URI:-"mongodb://localhost"} - -if [ "$PROVIDER_NAME" = "aws" ]; then - export MONGODB_URI_SINGLE="${MONGODB_URI}/?authMechanism=MONGODB-OIDC" - export MONGODB_URI_MULTIPLE="${MONGODB_URI}:27018/?authMechanism=MONGODB-OIDC&directConnection=true" - - if [ -z "${OIDC_TOKEN_DIR}" ]; then - echo "Must specify OIDC_TOKEN_DIR" - exit 1 - fi - npm run check:oidc -elif [ "$PROVIDER_NAME" = "azure" ]; then - if [ -z "${AZUREOIDC_CLIENTID}" ]; then - echo "Must specify an 
AZUREOIDC_CLIENTID" - exit 1 - fi - MONGODB_URI="${MONGODB_URI}/?authMechanism=MONGODB-OIDC" - MONGODB_URI="${MONGODB_URI}&authMechanismProperties=PROVIDER_NAME:azure" - export MONGODB_URI="${MONGODB_URI},TOKEN_AUDIENCE:api%3A%2F%2F${AZUREOIDC_CLIENTID}" - npm run check:oidc-azure -else - npm run check:oidc -fi diff --git a/.evergreen/run-oidc-unified-tests.sh b/.evergreen/run-oidc-unified-tests.sh new file mode 100755 index 00000000000..051256a64f9 --- /dev/null +++ b/.evergreen/run-oidc-unified-tests.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -o errexit # Exit the script with error if any of the commands fail +set -o xtrace # Write all commands first to stderr + +ENVIRONMENT=${ENVIRONMENT:-"test"} +PROJECT_DIRECTORY=${PROJECT_DIRECTORY:-"."} +source "${PROJECT_DIRECTORY}/.evergreen/init-node-and-npm-env.sh" + +if [ "$ENVIRONMENT" = "test" ]; then + export OIDC_TOKEN_DIR=${OIDC_TOKEN_DIR} + export MONGODB_URI_SINGLE="${MONGODB_URI_SINGLE}&authMechanismProperties=ENVIRONMENT:test" +fi +export UTIL_CLIENT_USER=$OIDC_ADMIN_USER +export UTIL_CLIENT_PASSWORD=$OIDC_ADMIN_PWD + +npm run check:oidc-auth \ No newline at end of file diff --git a/.evergreen/setup-oidc-roles.sh b/.evergreen/setup-oidc-roles.sh deleted file mode 100644 index 6be43905cf7..00000000000 --- a/.evergreen/setup-oidc-roles.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -o xtrace # Write all commands first to stderr - -cd ${DRIVERS_TOOLS}/.evergreen/auth_oidc -. 
./activate-authoidcvenv.sh - -${DRIVERS_TOOLS}/mongodb/bin/mongosh "mongodb://localhost:27017,localhost:27018/?replicaSet=oidc-repl0&readPreference=primary" setup_oidc.js diff --git a/.github/scripts/highlights.mjs b/.github/scripts/highlights.mjs index 6df213dced8..64363c432c2 100644 --- a/.github/scripts/highlights.mjs +++ b/.github/scripts/highlights.mjs @@ -1,25 +1,22 @@ // @ts-check import * as process from 'node:process'; -import { Octokit } from '@octokit/core'; import { output } from './util.mjs'; const { GITHUB_TOKEN = '', PR_LIST = '', - owner = 'mongodb', - repo = 'node-mongodb-native' + REPOSITORY = '' } = process.env; if (GITHUB_TOKEN === '') throw new Error('GITHUB_TOKEN cannot be empty'); +if (REPOSITORY === '') throw new Error('REPOSITORY cannot be empty') -const octokit = new Octokit({ - auth: GITHUB_TOKEN, - log: { - debug: msg => console.error('Octokit.debug', msg), - info: msg => console.error('Octokit.info', msg), - warn: msg => console.error('Octokit.warn', msg), - error: msg => console.error('Octokit.error', msg) +const API_REQ_INFO = { + headers: { + Accept: 'application/vnd.github.v3+json', + 'X-GitHub-Api-Version': '2022-11-28', + Authorization: `Bearer ${GITHUB_TOKEN}` } -}); +} const prs = PR_LIST.split(',').map(pr => { const prNum = Number(pr); @@ -35,13 +32,10 @@ async function getPullRequestContent(pull_number) { let body; try { - const res = await octokit.request('GET /repos/{owner}/{repo}/pulls/{pull_number}', { - owner, - repo, - pull_number, - headers: { 'X-GitHub-Api-Version': '2022-11-28' } - }); - body = res.data.body; + const response = await fetch(new URL(`https://api.github.com/repos/${REPOSITORY}/pulls/${pull_number}`), API_REQ_INFO); + if (!response.ok) throw new Error(await response.text()); + const pr = await response.json(); + body = pr.body; } catch (error) { console.log(`Could not get PR ${pull_number}, skipping. 
${error.status}`); return ''; diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000000..4ff097d9c77 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,46 @@ +name: "CodeQL" + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + runs-on: 'ubuntu-latest' + timeout-minutes: 360 + permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: javascript-typescript + build-mode: none + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + source-root: "./src" + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b58b4a44195..c161f125c4e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - id: release - uses: google-github-actions/release-please-action@v4 + uses: googleapis/release-please-action@v4 # If release-please created a release, publish to npm - if: ${{ steps.release.outputs.release_created }} diff --git a/.github/workflows/release_notes.yml b/.github/workflows/release_notes.yml index 3293db7ef33..54b038f5319 100644 --- a/.github/workflows/release_notes.yml +++ b/.github/workflows/release_notes.yml @@ -64,6 +64,7 @@ jobs: env: GITHUB_TOKEN: ${{ github.token }} PR_LIST: ${{ steps.pr_list.outputs.pr_list }} + REPOSITORY: ${{ github.repository }} 
# The combined output is available - id: release_notes diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a7a751dd731..5c63699c14b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "6.6.2" + ".": "6.7.0" } diff --git a/HISTORY.md b/HISTORY.md index 39bc4488d7a..ea7dfee973d 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [6.7.0](https://github.com/mongodb/node-mongodb-native/compare/v6.6.2...v6.7.0) (2024-05-29) + + +### Features + +* **NODE-5464:** OIDC machine and callback workflow ([#3912](https://github.com/mongodb/node-mongodb-native/issues/3912)) ([2ba8434](https://github.com/mongodb/node-mongodb-native/commit/2ba8434d10aa02ddf281482cc02ef168c1b2965c)) + + +### Bug Fixes + +* **NODE-6165:** useBigInt64 causes compareTopologyVersion to throw ([#4109](https://github.com/mongodb/node-mongodb-native/issues/4109)) ([21b729b](https://github.com/mongodb/node-mongodb-native/commit/21b729b983e2ddbe1dd9cff31f996825a45ec4e0)) + ## [6.6.2](https://github.com/mongodb/node-mongodb-native/compare/v6.6.1...v6.6.2) (2024-05-15) diff --git a/package-lock.json b/package-lock.json index 8096f6df15c..e1b1113838a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "mongodb", - "version": "6.6.2", + "version": "6.7.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "mongodb", - "version": "6.6.2", + "version": "6.7.0", "license": "Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.1.5", @@ -20,7 +20,6 @@ "@microsoft/api-extractor": "^7.43.1", "@microsoft/tsdoc-config": "^0.16.2", "@mongodb-js/zstd": "^1.2.0", - "@octokit/core": "^6.1.2", "@types/chai": 
"^4.3.14", "@types/chai-subset": "^1.3.5", "@types/express": "^4.17.21", @@ -2083,102 +2082,6 @@ "node": ">= 8" } }, - "node_modules/@octokit/auth-token": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.1.tgz", - "integrity": "sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA==", - "dev": true, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/core": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.2.tgz", - "integrity": "sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg==", - "dev": true, - "dependencies": { - "@octokit/auth-token": "^5.0.0", - "@octokit/graphql": "^8.0.0", - "@octokit/request": "^9.0.0", - "@octokit/request-error": "^6.0.1", - "@octokit/types": "^13.0.0", - "before-after-hook": "^3.0.2", - "universal-user-agent": "^7.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/endpoint": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz", - "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==", - "dev": true, - "dependencies": { - "@octokit/types": "^13.0.0", - "universal-user-agent": "^7.0.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/graphql": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.1.1.tgz", - "integrity": "sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg==", - "dev": true, - "dependencies": { - "@octokit/request": "^9.0.0", - "@octokit/types": "^13.0.0", - "universal-user-agent": "^7.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "22.1.0", - "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.1.0.tgz", - "integrity": "sha512-pGUdSP+eEPfZiQHNkZI0U01HLipxncisdJQB4G//OAmfeO8sqTQ9KRa0KF03TUPCziNsoXUrTg4B2Q1EX++T0Q==", - "dev": true - }, - "node_modules/@octokit/request": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.1.1.tgz", - "integrity": "sha512-pyAguc0p+f+GbQho0uNetNQMmLG1e80WjkIaqqgUkihqUp0boRU6nKItXO4VWnr+nbZiLGEyy4TeKRwqaLvYgw==", - "dev": true, - "dependencies": { - "@octokit/endpoint": "^10.0.0", - "@octokit/request-error": "^6.0.1", - "@octokit/types": "^13.1.0", - "universal-user-agent": "^7.0.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/request-error": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.1.tgz", - "integrity": "sha512-1mw1gqT3fR/WFvnoVpY/zUM2o/XkMs/2AszUUG9I69xn0JFLv6PGkPhNk5lbfvROs79wiS0bqiJNxfCZcRJJdg==", - "dev": true, - "dependencies": { - "@octokit/types": "^13.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@octokit/types": { - "version": "13.4.1", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.4.1.tgz", - "integrity": "sha512-Y73oOAzRBAUzR/iRAbGULzpNkX8vaxKCqEtg6K74Ff3w9f5apFnWtE/2nade7dMWWW3bS5Kkd6DJS4HF04xreg==", - "dev": true, - "dependencies": { - "@octokit/openapi-types": "^22.1.0" - } - }, "node_modules/@pkgr/core": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", @@ -3771,12 +3674,6 @@ } ] }, - "node_modules/before-after-hook": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz", - "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==", - "dev": true - }, "node_modules/bignumber.js": { "version": "9.1.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.1.tgz", @@ -10422,12 +10319,6 @@ 
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", "dev": true }, - "node_modules/universal-user-agent": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz", - "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==", - "dev": true - }, "node_modules/universalify": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", diff --git a/package.json b/package.json index 03693ddd1ec..a4b1829b390 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "mongodb", - "version": "6.6.2", + "version": "6.7.0", "description": "The official MongoDB driver for Node.js", "main": "lib/index.js", "files": [ @@ -68,7 +68,6 @@ "@microsoft/api-extractor": "^7.43.1", "@microsoft/tsdoc-config": "^0.16.2", "@mongodb-js/zstd": "^1.2.0", - "@octokit/core": "^6.1.2", "@types/chai": "^4.3.14", "@types/chai-subset": "^1.3.5", "@types/express": "^4.17.21", @@ -149,8 +148,10 @@ "check:drivers-atlas-testing": "mocha --config test/mocha_mongodb.json test/atlas/drivers_atlas_testing.test.ts", "check:adl": "mocha --config test/mocha_mongodb.json test/manual/atlas-data-lake-testing", "check:aws": "nyc mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_aws.test.ts", - "check:oidc": "mocha --config test/mocha_mongodb.json test/manual/mongodb_oidc.prose.test.ts", - "check:oidc-azure": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_azure.prose.test.ts", + "check:oidc-auth": "mocha --config test/mocha_mongodb.json test/integration/auth/auth.spec.test.ts", + "check:oidc-test": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc.prose.test.ts", + "check:oidc-azure": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_azure.prose.05.test.ts", + "check:oidc-gcp": "mocha 
--config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_gcp.prose.06.test.ts", "check:ocsp": "mocha --config test/manual/mocharc.json test/manual/ocsp_support.test.js", "check:kerberos": "nyc mocha --config test/manual/mocharc.json test/manual/kerberos.test.ts", "check:tls": "mocha --config test/manual/mocharc.json test/manual/tls_support.test.ts", diff --git a/src/client-side-encryption/providers/azure.ts b/src/client-side-encryption/providers/azure.ts index bee6038bdd8..97a2665ee9a 100644 --- a/src/client-side-encryption/providers/azure.ts +++ b/src/client-side-encryption/providers/azure.ts @@ -1,9 +1,12 @@ import { type Document } from '../../bson'; -import { MongoCryptAzureKMSRequestError, MongoCryptKMSRequestNetworkTimeoutError } from '../errors'; +import { MongoNetworkTimeoutError } from '../../error'; +import { get } from '../../utils'; +import { MongoCryptAzureKMSRequestError } from '../errors'; import { type KMSProviders } from './index'; -import { get } from './utils'; const MINIMUM_TOKEN_REFRESH_IN_MILLISECONDS = 6000; +/** Base URL for getting Azure tokens. */ +export const AZURE_BASE_URL = 'http://169.254.169.254/metadata/identity/oauth2/token?'; /** * The access token that libmongocrypt expects for Azure kms. @@ -113,6 +116,19 @@ export interface AzureKMSRequestOptions { url?: URL | string; } +/** + * @internal + * Get the Azure endpoint URL. + */ +export function addAzureParams(url: URL, resource: string, username?: string): URL { + url.searchParams.append('api-version', '2018-02-01'); + url.searchParams.append('resource', resource); + if (username) { + url.searchParams.append('client_id', username); + } + return url; +} + /** * @internal * @@ -123,13 +139,8 @@ export function prepareRequest(options: AzureKMSRequestOptions): { headers: Document; url: URL; } { - const url = new URL( - options.url?.toString() ?? 
'http://169.254.169.254/metadata/identity/oauth2/token' - ); - - url.searchParams.append('api-version', '2018-02-01'); - url.searchParams.append('resource', 'https://vault.azure.net'); - + const url = new URL(options.url?.toString() ?? AZURE_BASE_URL); + addAzureParams(url, 'https://vault.azure.net'); const headers = { ...options.headers, 'Content-Type': 'application/json', Metadata: true }; return { headers, url }; } @@ -152,7 +163,7 @@ export async function fetchAzureKMSToken( const response = await get(url, { headers }); return await parseResponse(response); } catch (error) { - if (error instanceof MongoCryptKMSRequestNetworkTimeoutError) { + if (error instanceof MongoNetworkTimeoutError) { throw new MongoCryptAzureKMSRequestError(`[Azure KMS] ${error.message}`); } throw error; diff --git a/src/client-side-encryption/providers/utils.ts b/src/client-side-encryption/providers/utils.ts deleted file mode 100644 index 8d5362c6993..00000000000 --- a/src/client-side-encryption/providers/utils.ts +++ /dev/null @@ -1,37 +0,0 @@ -import * as http from 'http'; -import { clearTimeout, setTimeout } from 'timers'; - -import { MongoCryptKMSRequestNetworkTimeoutError } from '../errors'; - -/** - * @internal - */ -export function get( - url: URL | string, - options: http.RequestOptions = {} -): Promise<{ body: string; status: number | undefined }> { - return new Promise((resolve, reject) => { - /* eslint-disable prefer-const */ - let timeoutId: NodeJS.Timeout; - const request = http - .get(url, options, response => { - response.setEncoding('utf8'); - let body = ''; - response.on('data', chunk => (body += chunk)); - response.on('end', () => { - clearTimeout(timeoutId); - resolve({ status: response.statusCode, body }); - }); - }) - .on('error', error => { - clearTimeout(timeoutId); - reject(error); - }) - .end(); - timeoutId = setTimeout(() => { - request.destroy( - new MongoCryptKMSRequestNetworkTimeoutError(`request timed out after 10 seconds`) - ); - }, 10000); - }); -} diff 
--git a/src/cmap/auth/mongo_credentials.ts b/src/cmap/auth/mongo_credentials.ts index c086afb4e7e..3438886eff6 100644 --- a/src/cmap/auth/mongo_credentials.ts +++ b/src/cmap/auth/mongo_credentials.ts @@ -3,12 +3,11 @@ import type { Document } from '../../bson'; import { MongoAPIError, - MongoAzureError, MongoInvalidArgumentError, MongoMissingCredentialsError } from '../../error'; import { GSSAPICanonicalizationValue } from './gssapi'; -import type { OIDCRefreshFunction, OIDCRequestFunction } from './mongodb_oidc'; +import type { OIDCCallbackFunction } from './mongodb_oidc'; import { AUTH_MECHS_AUTH_SRC_EXTERNAL, AuthMechanism } from './providers'; // https://github.com/mongodb/specifications/blob/master/source/auth/auth.rst @@ -32,12 +31,17 @@ function getDefaultAuthMechanism(hello: Document | null): AuthMechanism { return AuthMechanism.MONGODB_CR; } -const ALLOWED_PROVIDER_NAMES: AuthMechanismProperties['PROVIDER_NAME'][] = ['aws', 'azure']; +const ALLOWED_ENVIRONMENT_NAMES: AuthMechanismProperties['ENVIRONMENT'][] = [ + 'test', + 'azure', + 'gcp' +]; const ALLOWED_HOSTS_ERROR = 'Auth mechanism property ALLOWED_HOSTS must be an array of strings.'; /** @internal */ export const DEFAULT_ALLOWED_HOSTS = [ '*.mongodb.net', + '*.mongodb-qa.net', '*.mongodb-dev.net', '*.mongodbgov.net', 'localhost', @@ -46,8 +50,8 @@ export const DEFAULT_ALLOWED_HOSTS = [ ]; /** Error for when the token audience is missing in the environment. 
*/ -const TOKEN_AUDIENCE_MISSING_ERROR = - 'TOKEN_AUDIENCE must be set in the auth mechanism properties when PROVIDER_NAME is azure.'; +const TOKEN_RESOURCE_MISSING_ERROR = + 'TOKEN_RESOURCE must be set in the auth mechanism properties when ENVIRONMENT is azure or gcp.'; /** @public */ export interface AuthMechanismProperties extends Document { @@ -56,16 +60,16 @@ export interface AuthMechanismProperties extends Document { SERVICE_REALM?: string; CANONICALIZE_HOST_NAME?: GSSAPICanonicalizationValue; AWS_SESSION_TOKEN?: string; - /** @experimental */ - REQUEST_TOKEN_CALLBACK?: OIDCRequestFunction; - /** @experimental */ - REFRESH_TOKEN_CALLBACK?: OIDCRefreshFunction; - /** @experimental */ - PROVIDER_NAME?: 'aws' | 'azure'; - /** @experimental */ + /** A user provided OIDC machine callback function. */ + OIDC_CALLBACK?: OIDCCallbackFunction; + /** A user provided OIDC human interacted callback function. */ + OIDC_HUMAN_CALLBACK?: OIDCCallbackFunction; + /** The OIDC environment. Note that 'test' is for internal use only. */ + ENVIRONMENT?: 'test' | 'azure' | 'gcp'; + /** Allowed hosts that OIDC auth can connect to. */ ALLOWED_HOSTS?: string[]; - /** @experimental */ - TOKEN_AUDIENCE?: string; + /** The resource token for OIDC auth in Azure and GCP. 
*/ + TOKEN_RESOURCE?: string; } /** @public */ @@ -179,45 +183,48 @@ export class MongoCredentials { } if (this.mechanism === AuthMechanism.MONGODB_OIDC) { - if (this.username && this.mechanismProperties.PROVIDER_NAME) { + if ( + this.username && + this.mechanismProperties.ENVIRONMENT && + this.mechanismProperties.ENVIRONMENT !== 'azure' + ) { throw new MongoInvalidArgumentError( - `username and PROVIDER_NAME may not be used together for mechanism '${this.mechanism}'.` + `username and ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' may not be used together for mechanism '${this.mechanism}'.` ); } - if ( - this.mechanismProperties.PROVIDER_NAME === 'azure' && - !this.mechanismProperties.TOKEN_AUDIENCE - ) { - throw new MongoAzureError(TOKEN_AUDIENCE_MISSING_ERROR); + if (this.username && this.password) { + throw new MongoInvalidArgumentError( + `No password is allowed in ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' for '${this.mechanism}'.` + ); } if ( - this.mechanismProperties.PROVIDER_NAME && - !ALLOWED_PROVIDER_NAMES.includes(this.mechanismProperties.PROVIDER_NAME) + (this.mechanismProperties.ENVIRONMENT === 'azure' || + this.mechanismProperties.ENVIRONMENT === 'gcp') && + !this.mechanismProperties.TOKEN_RESOURCE ) { - throw new MongoInvalidArgumentError( - `Currently only a PROVIDER_NAME in ${ALLOWED_PROVIDER_NAMES.join( - ',' - )} is supported for mechanism '${this.mechanism}'.` - ); + throw new MongoInvalidArgumentError(TOKEN_RESOURCE_MISSING_ERROR); } if ( - this.mechanismProperties.REFRESH_TOKEN_CALLBACK && - !this.mechanismProperties.REQUEST_TOKEN_CALLBACK + this.mechanismProperties.ENVIRONMENT && + !ALLOWED_ENVIRONMENT_NAMES.includes(this.mechanismProperties.ENVIRONMENT) ) { throw new MongoInvalidArgumentError( - `A REQUEST_TOKEN_CALLBACK must be provided when using a REFRESH_TOKEN_CALLBACK for mechanism '${this.mechanism}'` + `Currently only a ENVIRONMENT in ${ALLOWED_ENVIRONMENT_NAMES.join( + ',' + )} is supported for mechanism 
'${this.mechanism}'.` ); } if ( - !this.mechanismProperties.PROVIDER_NAME && - !this.mechanismProperties.REQUEST_TOKEN_CALLBACK + !this.mechanismProperties.ENVIRONMENT && + !this.mechanismProperties.OIDC_CALLBACK && + !this.mechanismProperties.OIDC_HUMAN_CALLBACK ) { throw new MongoInvalidArgumentError( - `Either a PROVIDER_NAME or a REQUEST_TOKEN_CALLBACK must be specified for mechanism '${this.mechanism}'.` + `Either a ENVIRONMENT, OIDC_CALLBACK, or OIDC_HUMAN_CALLBACK must be specified for mechanism '${this.mechanism}'.` ); } diff --git a/src/cmap/auth/mongodb_oidc.ts b/src/cmap/auth/mongodb_oidc.ts index f3584c4893e..e44436b5ab9 100644 --- a/src/cmap/auth/mongodb_oidc.ts +++ b/src/cmap/auth/mongodb_oidc.ts @@ -5,64 +5,93 @@ import type { HandshakeDocument } from '../connect'; import type { Connection } from '../connection'; import { type AuthContext, AuthProvider } from './auth_provider'; import type { MongoCredentials } from './mongo_credentials'; -import { AwsServiceWorkflow } from './mongodb_oidc/aws_service_workflow'; -import { AzureServiceWorkflow } from './mongodb_oidc/azure_service_workflow'; -import { CallbackWorkflow } from './mongodb_oidc/callback_workflow'; +import { AzureMachineWorkflow } from './mongodb_oidc/azure_machine_workflow'; +import { GCPMachineWorkflow } from './mongodb_oidc/gcp_machine_workflow'; +import { TokenCache } from './mongodb_oidc/token_cache'; +import { TokenMachineWorkflow } from './mongodb_oidc/token_machine_workflow'; /** Error when credentials are missing. */ const MISSING_CREDENTIALS_ERROR = 'AuthContext must provide credentials.'; /** + * The information returned by the server on the IDP server. * @public - * @experimental */ -export interface IdPServerInfo { +export interface IdPInfo { + /** + * A URL which describes the Authentication Server. This identifier should + * be the iss of provided access tokens, and be viable for RFC8414 metadata + * discovery and RFC9207 identification. 
+ */ issuer: string; + /** A unique client ID for this OIDC client. */ clientId: string; + /** A list of additional scopes to request from IdP. */ requestScopes?: string[]; } /** + * The response from the IdP server with the access token and + * optional expiration time and refresh token. * @public - * @experimental */ export interface IdPServerResponse { + /** The OIDC access token. */ accessToken: string; + /** The time when the access token expires. For future use. */ expiresInSeconds?: number; + /** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */ refreshToken?: string; } /** + * The response required to be returned from the machine or + * human callback workflows' callback. * @public - * @experimental */ -export interface OIDCCallbackContext { +export interface OIDCResponse { + /** The OIDC access token. */ + accessToken: string; + /** The time when the access token expires. For future use. */ + expiresInSeconds?: number; + /** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */ refreshToken?: string; - timeoutSeconds?: number; - timeoutContext?: AbortSignal; - version: number; } /** + * The parameters that the driver provides to the user supplied + * human or machine callback. + * + * The version number is used to communicate callback API changes that are not breaking but that + * users may want to know about and review their implementation. Users may wish to check the version + * number and throw an error if their expected version number and the one provided do not match. * @public - * @experimental */ -export type OIDCRequestFunction = ( - info: IdPServerInfo, - context: OIDCCallbackContext -) => Promise; +export interface OIDCCallbackParams { + /** Optional username. */ + username?: string; + /** The context in which to timeout the OIDC callback. */ + timeoutContext: AbortSignal; + /** The current OIDC API version. 
*/ + version: 1; + /** The IdP information returned from the server. */ + idpInfo?: IdPInfo; + /** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */ + refreshToken?: string; +} /** + * The signature of the human or machine callback functions. * @public - * @experimental */ -export type OIDCRefreshFunction = ( - info: IdPServerInfo, - context: OIDCCallbackContext -) => Promise; +export type OIDCCallbackFunction = (params: OIDCCallbackParams) => Promise; + +/** The current version of OIDC implementation. */ +export const OIDC_VERSION = 1; -type ProviderName = 'aws' | 'azure' | 'callback'; +type EnvironmentName = 'test' | 'azure' | 'gcp' | undefined; +/** @internal */ export interface Workflow { /** * All device workflows must implement this method in order to get the access @@ -71,32 +100,41 @@ export interface Workflow { execute( connection: Connection, credentials: MongoCredentials, - reauthenticating: boolean, response?: Document - ): Promise; + ): Promise; + + /** + * Each workflow should specify the correct custom behaviour for reauthentication. + */ + reauthenticate(connection: Connection, credentials: MongoCredentials): Promise; /** * Get the document to add for speculative authentication. */ - speculativeAuth(credentials: MongoCredentials): Promise; + speculativeAuth(connection: Connection, credentials: MongoCredentials): Promise; } /** @internal */ -export const OIDC_WORKFLOWS: Map = new Map(); -OIDC_WORKFLOWS.set('callback', new CallbackWorkflow()); -OIDC_WORKFLOWS.set('aws', new AwsServiceWorkflow()); -OIDC_WORKFLOWS.set('azure', new AzureServiceWorkflow()); +export const OIDC_WORKFLOWS: Map Workflow> = new Map(); +OIDC_WORKFLOWS.set('test', () => new TokenMachineWorkflow(new TokenCache())); +OIDC_WORKFLOWS.set('azure', () => new AzureMachineWorkflow(new TokenCache())); +OIDC_WORKFLOWS.set('gcp', () => new GCPMachineWorkflow(new TokenCache())); /** * OIDC auth provider. 
- * @experimental */ export class MongoDBOIDC extends AuthProvider { + workflow: Workflow; + /** * Instantiate the auth provider. */ - constructor() { + constructor(workflow?: Workflow) { super(); + if (!workflow) { + throw new MongoInvalidArgumentError('No workflow provided to the OIDC auth provider.'); + } + this.workflow = workflow; } /** @@ -104,9 +142,15 @@ export class MongoDBOIDC extends AuthProvider { */ override async auth(authContext: AuthContext): Promise { const { connection, reauthenticating, response } = authContext; + if (response?.speculativeAuthenticate?.done) { + return; + } const credentials = getCredentials(authContext); - const workflow = getWorkflow(credentials); - await workflow.execute(connection, credentials, reauthenticating, response); + if (reauthenticating) { + await this.workflow.reauthenticate(connection, credentials); + } else { + await this.workflow.execute(connection, credentials, response); + } } /** @@ -116,9 +160,9 @@ export class MongoDBOIDC extends AuthProvider { handshakeDoc: HandshakeDocument, authContext: AuthContext ): Promise { + const { connection } = authContext; const credentials = getCredentials(authContext); - const workflow = getWorkflow(credentials); - const result = await workflow.speculativeAuth(credentials); + const result = await this.workflow.speculativeAuth(connection, credentials); return { ...handshakeDoc, ...result }; } } @@ -133,17 +177,3 @@ function getCredentials(authContext: AuthContext): MongoCredentials { } return credentials; } - -/** - * Gets either a device workflow or callback workflow. 
- */ -function getWorkflow(credentials: MongoCredentials): Workflow { - const providerName = credentials.mechanismProperties.PROVIDER_NAME; - const workflow = OIDC_WORKFLOWS.get(providerName || 'callback'); - if (!workflow) { - throw new MongoInvalidArgumentError( - `Could not load workflow for provider ${credentials.mechanismProperties.PROVIDER_NAME}` - ); - } - return workflow; -} diff --git a/src/cmap/auth/mongodb_oidc/automated_callback_workflow.ts b/src/cmap/auth/mongodb_oidc/automated_callback_workflow.ts new file mode 100644 index 00000000000..f98d87f6a27 --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/automated_callback_workflow.ts @@ -0,0 +1,82 @@ +import { MONGODB_ERROR_CODES, MongoError, MongoOIDCError } from '../../../error'; +import { Timeout, TimeoutError } from '../../../timeout'; +import { type Connection } from '../../connection'; +import { type MongoCredentials } from '../mongo_credentials'; +import { + OIDC_VERSION, + type OIDCCallbackFunction, + type OIDCCallbackParams, + type OIDCResponse +} from '../mongodb_oidc'; +import { AUTOMATED_TIMEOUT_MS, CallbackWorkflow } from './callback_workflow'; +import { type TokenCache } from './token_cache'; + +/** + * Class implementing behaviour for the non human callback workflow. + * @internal + */ +export class AutomatedCallbackWorkflow extends CallbackWorkflow { + /** + * Instantiate the human callback workflow. + */ + constructor(cache: TokenCache, callback: OIDCCallbackFunction) { + super(cache, callback); + } + + /** + * Execute the OIDC callback workflow. + */ + async execute(connection: Connection, credentials: MongoCredentials): Promise { + // If there is a cached access token, try to authenticate with it. If + // authentication fails with an Authentication error (18), + // invalidate the access token, fetch a new access token, and try + // to authenticate again. + // If the server fails for any other reason, do not clear the cache. 
+ if (this.cache.hasAccessToken) { + const token = this.cache.getAccessToken(); + try { + return await this.finishAuthentication(connection, credentials, token); + } catch (error) { + if ( + error instanceof MongoError && + error.code === MONGODB_ERROR_CODES.AuthenticationFailed + ) { + this.cache.removeAccessToken(); + return await this.execute(connection, credentials); + } else { + throw error; + } + } + } + const response = await this.fetchAccessToken(credentials); + this.cache.put(response); + connection.accessToken = response.accessToken; + await this.finishAuthentication(connection, credentials, response.accessToken); + } + + /** + * Fetches the access token using the callback. + */ + protected async fetchAccessToken(credentials: MongoCredentials): Promise { + const controller = new AbortController(); + const params: OIDCCallbackParams = { + timeoutContext: controller.signal, + version: OIDC_VERSION + }; + if (credentials.username) { + params.username = credentials.username; + } + const timeout = Timeout.expires(AUTOMATED_TIMEOUT_MS); + try { + return await Promise.race([this.executeAndValidateCallback(params), timeout]); + } catch (error) { + if (TimeoutError.is(error)) { + controller.abort(); + throw new MongoOIDCError(`OIDC callback timed out after ${AUTOMATED_TIMEOUT_MS}ms.`); + } + throw error; + } finally { + timeout.clear(); + } + } +} diff --git a/src/cmap/auth/mongodb_oidc/aws_service_workflow.ts b/src/cmap/auth/mongodb_oidc/aws_service_workflow.ts deleted file mode 100644 index 984608d899f..00000000000 --- a/src/cmap/auth/mongodb_oidc/aws_service_workflow.ts +++ /dev/null @@ -1,29 +0,0 @@ -import * as fs from 'fs'; - -import { MongoAWSError } from '../../../error'; -import { ServiceWorkflow } from './service_workflow'; - -/** Error for when the token is missing in the environment. */ -const TOKEN_MISSING_ERROR = 'AWS_WEB_IDENTITY_TOKEN_FILE must be set in the environment.'; - -/** - * Device workflow implementation for AWS. 
- * - * @internal - */ -export class AwsServiceWorkflow extends ServiceWorkflow { - constructor() { - super(); - } - - /** - * Get the token from the environment. - */ - async getToken(): Promise { - const tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE; - if (!tokenFile) { - throw new MongoAWSError(TOKEN_MISSING_ERROR); - } - return await fs.promises.readFile(tokenFile, 'utf8'); - } -} diff --git a/src/cmap/auth/mongodb_oidc/azure_machine_workflow.ts b/src/cmap/auth/mongodb_oidc/azure_machine_workflow.ts new file mode 100644 index 00000000000..1f41b8dc08d --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/azure_machine_workflow.ts @@ -0,0 +1,85 @@ +import { addAzureParams, AZURE_BASE_URL } from '../../../client-side-encryption/providers/azure'; +import { MongoAzureError } from '../../../error'; +import { get } from '../../../utils'; +import type { MongoCredentials } from '../mongo_credentials'; +import { type AccessToken, MachineWorkflow } from './machine_workflow'; +import { type TokenCache } from './token_cache'; + +/** Azure request headers. */ +const AZURE_HEADERS = Object.freeze({ Metadata: 'true', Accept: 'application/json' }); + +/** Invalid endpoint result error. */ +const ENDPOINT_RESULT_ERROR = + 'Azure endpoint did not return a value with only access_token and expires_in properties'; + +/** Error for when the token audience is missing in the environment. */ +const TOKEN_RESOURCE_MISSING_ERROR = + 'TOKEN_RESOURCE must be set in the auth mechanism properties when ENVIRONMENT is azure.'; + +/** + * Device workflow implementation for Azure. + * + * @internal + */ +export class AzureMachineWorkflow extends MachineWorkflow { + /** + * Instantiate the machine workflow. + */ + constructor(cache: TokenCache) { + super(cache); + } + + /** + * Get the token from the environment. 
+ */ + async getToken(credentials?: MongoCredentials): Promise { + const tokenAudience = credentials?.mechanismProperties.TOKEN_RESOURCE; + const username = credentials?.username; + if (!tokenAudience) { + throw new MongoAzureError(TOKEN_RESOURCE_MISSING_ERROR); + } + const response = await getAzureTokenData(tokenAudience, username); + if (!isEndpointResultValid(response)) { + throw new MongoAzureError(ENDPOINT_RESULT_ERROR); + } + return response; + } +} + +/** + * Hit the Azure endpoint to get the token data. + */ +async function getAzureTokenData(tokenAudience: string, username?: string): Promise { + const url = new URL(AZURE_BASE_URL); + addAzureParams(url, tokenAudience, username); + const response = await get(url, { + headers: AZURE_HEADERS + }); + if (response.status !== 200) { + throw new MongoAzureError( + `Status code ${response.status} returned from the Azure endpoint. Response body: ${response.body}` + ); + } + const result = JSON.parse(response.body); + return { + access_token: result.access_token, + expires_in: Number(result.expires_in) + }; +} + +/** + * Determines if a result returned from the endpoint is valid. + * This means the result is not nullish, contains the access_token required field + * and the expires_in required field. 
+ */ +function isEndpointResultValid( + token: unknown +): token is { access_token: unknown; expires_in: unknown } { + if (token == null || typeof token !== 'object') return false; + return ( + 'access_token' in token && + typeof token.access_token === 'string' && + 'expires_in' in token && + typeof token.expires_in === 'number' + ); +} diff --git a/src/cmap/auth/mongodb_oidc/azure_service_workflow.ts b/src/cmap/auth/mongodb_oidc/azure_service_workflow.ts deleted file mode 100644 index fadbf5e9fd9..00000000000 --- a/src/cmap/auth/mongodb_oidc/azure_service_workflow.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { MongoAzureError } from '../../../error'; -import { request } from '../../../utils'; -import type { MongoCredentials } from '../mongo_credentials'; -import { AzureTokenCache } from './azure_token_cache'; -import { ServiceWorkflow } from './service_workflow'; - -/** Base URL for getting Azure tokens. */ -const AZURE_BASE_URL = - 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01'; - -/** Azure request headers. */ -const AZURE_HEADERS = Object.freeze({ Metadata: 'true', Accept: 'application/json' }); - -/** Invalid endpoint result error. */ -const ENDPOINT_RESULT_ERROR = - 'Azure endpoint did not return a value with only access_token and expires_in properties'; - -/** Error for when the token audience is missing in the environment. */ -const TOKEN_AUDIENCE_MISSING_ERROR = - 'TOKEN_AUDIENCE must be set in the auth mechanism properties when PROVIDER_NAME is azure.'; - -/** - * The Azure access token format. - * @internal - */ -export interface AzureAccessToken { - access_token: string; - expires_in: number; -} - -/** - * Device workflow implementation for Azure. - * - * @internal - */ -export class AzureServiceWorkflow extends ServiceWorkflow { - cache = new AzureTokenCache(); - - /** - * Get the token from the environment. 
- */ - async getToken(credentials?: MongoCredentials): Promise { - const tokenAudience = credentials?.mechanismProperties.TOKEN_AUDIENCE; - if (!tokenAudience) { - throw new MongoAzureError(TOKEN_AUDIENCE_MISSING_ERROR); - } - let token; - const entry = this.cache.getEntry(tokenAudience); - if (entry?.isValid()) { - token = entry.token; - } else { - this.cache.deleteEntry(tokenAudience); - const response = await getAzureTokenData(tokenAudience); - if (!isEndpointResultValid(response)) { - throw new MongoAzureError(ENDPOINT_RESULT_ERROR); - } - this.cache.addEntry(tokenAudience, response); - token = response.access_token; - } - return token; - } -} - -/** - * Hit the Azure endpoint to get the token data. - */ -async function getAzureTokenData(tokenAudience: string): Promise { - const url = `${AZURE_BASE_URL}&resource=${tokenAudience}`; - const data = await request(url, { - json: true, - headers: AZURE_HEADERS - }); - return data as AzureAccessToken; -} - -/** - * Determines if a result returned from the endpoint is valid. - * This means the result is not nullish, contains the access_token required field - * and the expires_in required field. - */ -function isEndpointResultValid( - token: unknown -): token is { access_token: unknown; expires_in: unknown } { - if (token == null || typeof token !== 'object') return false; - return 'access_token' in token && 'expires_in' in token; -} diff --git a/src/cmap/auth/mongodb_oidc/azure_token_cache.ts b/src/cmap/auth/mongodb_oidc/azure_token_cache.ts deleted file mode 100644 index f68725120e8..00000000000 --- a/src/cmap/auth/mongodb_oidc/azure_token_cache.ts +++ /dev/null @@ -1,51 +0,0 @@ -import type { AzureAccessToken } from './azure_service_workflow'; -import { Cache, ExpiringCacheEntry } from './cache'; - -/** @internal */ -export class AzureTokenEntry extends ExpiringCacheEntry { - token: string; - - /** - * Instantiate the entry. 
- */ - constructor(token: string, expiration: number) { - super(expiration); - this.token = token; - } -} - -/** - * A cache of access tokens from Azure. - * @internal - */ -export class AzureTokenCache extends Cache { - /** - * Add an entry to the cache. - */ - addEntry(tokenAudience: string, token: AzureAccessToken): AzureTokenEntry { - const entry = new AzureTokenEntry(token.access_token, token.expires_in); - this.entries.set(tokenAudience, entry); - return entry; - } - - /** - * Create a cache key. - */ - cacheKey(tokenAudience: string): string { - return tokenAudience; - } - - /** - * Delete an entry from the cache. - */ - deleteEntry(tokenAudience: string): void { - this.entries.delete(tokenAudience); - } - - /** - * Get an Azure token entry from the cache. - */ - getEntry(tokenAudience: string): AzureTokenEntry | undefined { - return this.entries.get(tokenAudience); - } -} diff --git a/src/cmap/auth/mongodb_oidc/cache.ts b/src/cmap/auth/mongodb_oidc/cache.ts deleted file mode 100644 index e23685b3bca..00000000000 --- a/src/cmap/auth/mongodb_oidc/cache.ts +++ /dev/null @@ -1,63 +0,0 @@ -/* 5 minutes in milliseconds */ -const EXPIRATION_BUFFER_MS = 300000; - -/** - * An entry in a cache that can expire in a certain amount of time. - */ -export abstract class ExpiringCacheEntry { - expiration: number; - - /** - * Create a new expiring token entry. - */ - constructor(expiration: number) { - this.expiration = this.expirationTime(expiration); - } - /** - * The entry is still valid if the expiration is more than - * 5 minutes from the expiration time. - */ - isValid() { - return this.expiration - Date.now() > EXPIRATION_BUFFER_MS; - } - - /** - * Get an expiration time in milliseconds past epoch. - */ - private expirationTime(expiresInSeconds: number): number { - return Date.now() + expiresInSeconds * 1000; - } -} - -/** - * Base class for OIDC caches. - */ -export abstract class Cache { - entries: Map; - - /** - * Create a new cache. 
- */ - constructor() { - this.entries = new Map(); - } - - /** - * Clear the cache. - */ - clear() { - this.entries.clear(); - } - - /** - * Implement the cache key for the token. - */ - abstract cacheKey(address: string, username: string, callbackHash: string): string; - - /** - * Create a cache key from the address and username. - */ - hashedCacheKey(address: string, username: string, callbackHash: string): string { - return JSON.stringify([address, username, callbackHash]); - } -} diff --git a/src/cmap/auth/mongodb_oidc/callback_lock_cache.ts b/src/cmap/auth/mongodb_oidc/callback_lock_cache.ts deleted file mode 100644 index 9518c9d381f..00000000000 --- a/src/cmap/auth/mongodb_oidc/callback_lock_cache.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { MongoInvalidArgumentError } from '../../../error'; -import type { Connection } from '../../connection'; -import type { MongoCredentials } from '../mongo_credentials'; -import type { - IdPServerInfo, - IdPServerResponse, - OIDCCallbackContext, - OIDCRefreshFunction, - OIDCRequestFunction -} from '../mongodb_oidc'; -import { Cache } from './cache'; - -/** Error message for when request callback is missing. */ -const REQUEST_CALLBACK_REQUIRED_ERROR = - 'Auth mechanism property REQUEST_TOKEN_CALLBACK is required.'; -/* Counter for function "hashes".*/ -let FN_HASH_COUNTER = 0; -/* No function present function */ -const NO_FUNCTION: OIDCRequestFunction = async () => ({ accessToken: 'test' }); -/* The map of function hashes */ -const FN_HASHES = new WeakMap(); -/* Put the no function hash in the map. */ -FN_HASHES.set(NO_FUNCTION, FN_HASH_COUNTER); - -/** - * An entry of callbacks in the cache. - */ -interface CallbacksEntry { - requestCallback: OIDCRequestFunction; - refreshCallback?: OIDCRefreshFunction; - callbackHash: string; -} - -/** - * A cache of request and refresh callbacks per server/user. - */ -export class CallbackLockCache extends Cache { - /** - * Get the callbacks for the connection and credentials. 
If an entry does not - * exist a new one will get set. - */ - getEntry(connection: Connection, credentials: MongoCredentials): CallbacksEntry { - const requestCallback = credentials.mechanismProperties.REQUEST_TOKEN_CALLBACK; - const refreshCallback = credentials.mechanismProperties.REFRESH_TOKEN_CALLBACK; - if (!requestCallback) { - throw new MongoInvalidArgumentError(REQUEST_CALLBACK_REQUIRED_ERROR); - } - const callbackHash = hashFunctions(requestCallback, refreshCallback); - const key = this.cacheKey(connection.address, credentials.username, callbackHash); - const entry = this.entries.get(key); - if (entry) { - return entry; - } - return this.addEntry(key, callbackHash, requestCallback, refreshCallback); - } - - /** - * Set locked callbacks on for connection and credentials. - */ - private addEntry( - key: string, - callbackHash: string, - requestCallback: OIDCRequestFunction, - refreshCallback?: OIDCRefreshFunction - ): CallbacksEntry { - const entry = { - requestCallback: withLock(requestCallback), - refreshCallback: refreshCallback ? withLock(refreshCallback) : undefined, - callbackHash: callbackHash - }; - this.entries.set(key, entry); - return entry; - } - - /** - * Create a cache key from the address and username. - */ - cacheKey(address: string, username: string, callbackHash: string): string { - return this.hashedCacheKey(address, username, callbackHash); - } -} - -/** - * Ensure the callback is only executed one at a time. - */ -function withLock(callback: OIDCRequestFunction | OIDCRefreshFunction) { - let lock: Promise = Promise.resolve(); - return async (info: IdPServerInfo, context: OIDCCallbackContext): Promise => { - await lock; - // eslint-disable-next-line github/no-then - lock = lock.then(() => callback(info, context)); - return await lock; - }; -} - -/** - * Get the hash string for the request and refresh functions. 
- */ -function hashFunctions(requestFn: OIDCRequestFunction, refreshFn?: OIDCRefreshFunction): string { - let requestHash = FN_HASHES.get(requestFn); - let refreshHash = FN_HASHES.get(refreshFn ?? NO_FUNCTION); - if (requestHash == null) { - // Create a new one for the function and put it in the map. - FN_HASH_COUNTER++; - requestHash = FN_HASH_COUNTER; - FN_HASHES.set(requestFn, FN_HASH_COUNTER); - } - if (refreshHash == null && refreshFn) { - // Create a new one for the function and put it in the map. - FN_HASH_COUNTER++; - refreshHash = FN_HASH_COUNTER; - FN_HASHES.set(refreshFn, FN_HASH_COUNTER); - } - return `${requestHash}-${refreshHash}`; -} diff --git a/src/cmap/auth/mongodb_oidc/callback_workflow.ts b/src/cmap/auth/mongodb_oidc/callback_workflow.ts index 9822fd1e505..4f273367f2b 100644 --- a/src/cmap/auth/mongodb_oidc/callback_workflow.ts +++ b/src/cmap/auth/mongodb_oidc/callback_workflow.ts @@ -1,26 +1,23 @@ -import { Binary, BSON, type Document } from 'bson'; +import { type Document } from 'bson'; +import { setTimeout } from 'timers/promises'; -import { MONGODB_ERROR_CODES, MongoError, MongoMissingCredentialsError } from '../../../error'; +import { MongoMissingCredentialsError } from '../../../error'; import { ns } from '../../../utils'; import type { Connection } from '../../connection'; import type { MongoCredentials } from '../mongo_credentials'; -import type { - IdPServerInfo, - IdPServerResponse, - OIDCCallbackContext, - OIDCRefreshFunction, - OIDCRequestFunction, - Workflow +import { + type OIDCCallbackFunction, + type OIDCCallbackParams, + type OIDCResponse, + type Workflow } from '../mongodb_oidc'; -import { AuthMechanism } from '../providers'; -import { CallbackLockCache } from './callback_lock_cache'; -import { TokenEntryCache } from './token_entry_cache'; +import { finishCommandDocument, startCommandDocument } from './command_builders'; +import { type TokenCache } from './token_cache'; -/** The current version of OIDC implementation. 
*/ -const OIDC_VERSION = 0; - -/** 5 minutes in seconds */ -const TIMEOUT_S = 300; +/** 5 minutes in milliseconds */ +export const HUMAN_TIMEOUT_MS = 300000; +/** 1 minute in milliseconds */ +export const AUTOMATED_TIMEOUT_MS = 60000; /** Properties allowed on results of callbacks. */ const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken']; @@ -29,138 +26,89 @@ const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken']; const CALLBACK_RESULT_ERROR = 'User provided OIDC callbacks must return a valid object with an accessToken.'; +/** The time to throttle callback calls. */ +const THROTTLE_MS = 100; + /** * OIDC implementation of a callback based workflow. * @internal */ -export class CallbackWorkflow implements Workflow { - cache: TokenEntryCache; - callbackCache: CallbackLockCache; +export abstract class CallbackWorkflow implements Workflow { + cache: TokenCache; + callback: OIDCCallbackFunction; + lastExecutionTime: number; /** - * Instantiate the workflow + * Instantiate the callback workflow. */ - constructor() { - this.cache = new TokenEntryCache(); - this.callbackCache = new CallbackLockCache(); + constructor(cache: TokenCache, callback: OIDCCallbackFunction) { + this.cache = cache; + this.callback = this.withLock(callback); + this.lastExecutionTime = Date.now() - THROTTLE_MS; } /** * Get the document to add for speculative authentication. This also needs * to add a db field from the credentials source. */ - async speculativeAuth(credentials: MongoCredentials): Promise { - const document = startCommandDocument(credentials); - document.db = credentials.source; - return { speculativeAuthenticate: document }; + async speculativeAuth(connection: Connection, credentials: MongoCredentials): Promise { + // Check if the Client Cache has an access token. + // If it does, cache the access token in the Connection Cache and send a JwtStepRequest + // with the cached access token in the speculative authentication SASL payload. 
+ if (this.cache.hasAccessToken) { + const accessToken = this.cache.getAccessToken(); + connection.accessToken = accessToken; + const document = finishCommandDocument(accessToken); + document.db = credentials.source; + return { speculativeAuthenticate: document }; + } + return {}; } /** - * Execute the OIDC callback workflow. + * Reauthenticate the callback workflow. For this we invalidated the access token + * in the cache and run the authentication steps again. No initial handshake needs + * to be sent. */ - async execute( - connection: Connection, - credentials: MongoCredentials, - reauthenticating: boolean, - response?: Document - ): Promise { - // Get the callbacks with locks from the callback lock cache. - const { requestCallback, refreshCallback, callbackHash } = this.callbackCache.getEntry( - connection, - credentials - ); - // Look for an existing entry in the cache. - const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash); - let result; - if (entry) { - // Reauthentication cannot use a token from the cache since the server has - // stated it is invalid by the request for reauthentication. - if (entry.isValid() && !reauthenticating) { - // Presence of a valid cache entry means we can skip to the finishing step. - result = await this.finishAuthentication( - connection, - credentials, - entry.tokenResult, - response?.speculativeAuthenticate?.conversationId - ); + async reauthenticate(connection: Connection, credentials: MongoCredentials): Promise { + if (this.cache.hasAccessToken) { + // Reauthentication implies the token has expired. + if (connection.accessToken === this.cache.getAccessToken()) { + // If connection's access token is the same as the cache's, remove + // the token from the cache and connection. + this.cache.removeAccessToken(); + delete connection.accessToken; } else { - // Presence of an expired cache entry means we must fetch a new one and - // then execute the final step. 
- const tokenResult = await this.fetchAccessToken( - connection, - credentials, - entry.serverInfo, - reauthenticating, - callbackHash, - requestCallback, - refreshCallback - ); - try { - result = await this.finishAuthentication( - connection, - credentials, - tokenResult, - reauthenticating ? undefined : response?.speculativeAuthenticate?.conversationId - ); - } catch (error) { - // If we are reauthenticating and this errors with reauthentication - // required, we need to do the entire process over again and clear - // the cache entry. - if ( - reauthenticating && - error instanceof MongoError && - error.code === MONGODB_ERROR_CODES.Reauthenticate - ) { - this.cache.deleteEntry(connection.address, credentials.username, callbackHash); - result = await this.execute(connection, credentials, reauthenticating); - } else { - throw error; - } - } + // If the connection's access token is different from the cache's, set + // the cache's token on the connection and do not remove from the + // cache. + connection.accessToken = this.cache.getAccessToken(); } - } else { - // No entry in the cache requires us to do all authentication steps - // from start to finish, including getting a fresh token for the cache. - const startDocument = await this.startAuthentication( - connection, - credentials, - reauthenticating, - response - ); - const conversationId = startDocument.conversationId; - const serverResult = BSON.deserialize(startDocument.payload.buffer) as IdPServerInfo; - const tokenResult = await this.fetchAccessToken( - connection, - credentials, - serverResult, - reauthenticating, - callbackHash, - requestCallback, - refreshCallback - ); - result = await this.finishAuthentication( - connection, - credentials, - tokenResult, - conversationId - ); } - return result; + await this.execute(connection, credentials); } + /** + * Execute the OIDC callback workflow. 
+ */ + abstract execute( + connection: Connection, + credentials: MongoCredentials, + response?: Document + ): Promise; + /** * Starts the callback authentication process. If there is a speculative * authentication document from the initial handshake, then we will use that * value to get the issuer, otherwise we will send the saslStart command. */ - private async startAuthentication( + protected async startAuthentication( connection: Connection, credentials: MongoCredentials, - reauthenticating: boolean, response?: Document ): Promise { let result; - if (!reauthenticating && response?.speculativeAuthenticate) { + if (response?.speculativeAuthenticate) { result = response.speculativeAuthenticate; } else { result = await connection.command( @@ -175,97 +123,57 @@ export class CallbackWorkflow implements Workflow { /** * Finishes the callback authentication process. */ - private async finishAuthentication( + protected async finishAuthentication( connection: Connection, credentials: MongoCredentials, - tokenResult: IdPServerResponse, + token: string, conversationId?: number - ): Promise { - const result = await connection.command( + ): Promise { + await connection.command( ns(credentials.source), - finishCommandDocument(tokenResult.accessToken, conversationId), + finishCommandDocument(token, conversationId), undefined ); - return result; } /** - * Fetches an access token using either the request or refresh callbacks and - * puts it in the cache. + * Executes the callback and validates the output. */ - private async fetchAccessToken( - connection: Connection, - credentials: MongoCredentials, - serverInfo: IdPServerInfo, - reauthenticating: boolean, - callbackHash: string, - requestCallback: OIDCRequestFunction, - refreshCallback?: OIDCRefreshFunction - ): Promise { - // Get the token from the cache. 
- const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash); - let result; - const context: OIDCCallbackContext = { timeoutSeconds: TIMEOUT_S, version: OIDC_VERSION }; - // Check if there's a token in the cache. - if (entry) { - // If the cache entry is valid, return the token result. - if (entry.isValid() && !reauthenticating) { - return entry.tokenResult; - } - // If the cache entry is not valid, remove it from the cache and first attempt - // to use the refresh callback to get a new token. If no refresh callback - // exists, then fallback to the request callback. - if (refreshCallback) { - context.refreshToken = entry.tokenResult.refreshToken; - result = await refreshCallback(serverInfo, context); - } else { - result = await requestCallback(serverInfo, context); - } - } else { - // With no token in the cache we use the request callback. - result = await requestCallback(serverInfo, context); - } + protected async executeAndValidateCallback(params: OIDCCallbackParams): Promise { + const result = await this.callback(params); // Validate that the result returned by the callback is acceptable. If it is not // we must clear the token result from the cache. if (isCallbackResultInvalid(result)) { - this.cache.deleteEntry(connection.address, credentials.username, callbackHash); throw new MongoMissingCredentialsError(CALLBACK_RESULT_ERROR); } - // Cleanup the cache. - this.cache.deleteExpiredEntries(); - // Put the new entry into the cache. - this.cache.addEntry( - connection.address, - credentials.username || '', - callbackHash, - result, - serverInfo - ); return result; } -} -/** - * Generate the finishing command document for authentication. Will be a - * saslStart or saslContinue depending on the presence of a conversation id. 
- */ -function finishCommandDocument(token: string, conversationId?: number): Document { - if (conversationId != null && typeof conversationId === 'number') { - return { - saslContinue: 1, - conversationId: conversationId, - payload: new Binary(BSON.serialize({ jwt: token })) + /** + * Ensure the callback is only executed one at a time and throttles the calls + * to every 100ms. + */ + protected withLock(callback: OIDCCallbackFunction): OIDCCallbackFunction { + let lock: Promise = Promise.resolve(); + return async (params: OIDCCallbackParams): Promise => { + // We do this to ensure that we would never return the result of the + // previous lock, only the current callback's value would get returned. + await lock; + lock = lock + // eslint-disable-next-line github/no-then + .catch(() => null) + // eslint-disable-next-line github/no-then + .then(async () => { + const difference = Date.now() - this.lastExecutionTime; + if (difference <= THROTTLE_MS) { + await setTimeout(THROTTLE_MS - difference, { signal: params.timeoutContext }); + } + this.lastExecutionTime = Date.now(); + return await callback(params); + }); + return await lock; }; } - // saslContinue requires a conversationId in the command to be valid so in this - // case the server allows "step two" to actually be a saslStart with the token - // as the jwt since the use of the cached value has no correlating conversating - // on the particular connection. - return { - saslStart: 1, - mechanism: AuthMechanism.MONGODB_OIDC, - payload: new Binary(BSON.serialize({ jwt: token })) - }; } /** @@ -278,19 +186,3 @@ function isCallbackResultInvalid(tokenResult: unknown): boolean { if (!('accessToken' in tokenResult)) return true; return !Object.getOwnPropertyNames(tokenResult).every(prop => RESULT_PROPERTIES.includes(prop)); } - -/** - * Generate the saslStart command document. 
- */ -function startCommandDocument(credentials: MongoCredentials): Document { - const payload: Document = {}; - if (credentials.username) { - payload.n = credentials.username; - } - return { - saslStart: 1, - autoAuthorize: 1, - mechanism: AuthMechanism.MONGODB_OIDC, - payload: new Binary(BSON.serialize(payload)) - }; -} diff --git a/src/cmap/auth/mongodb_oidc/command_builders.ts b/src/cmap/auth/mongodb_oidc/command_builders.ts new file mode 100644 index 00000000000..2c2256e4afc --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/command_builders.ts @@ -0,0 +1,54 @@ +import { Binary, BSON, type Document } from 'bson'; + +import { type MongoCredentials } from '../mongo_credentials'; +import { AuthMechanism } from '../providers'; + +/** @internal */ +export interface OIDCCommand { + saslStart?: number; + saslContinue?: number; + conversationId?: number; + mechanism?: string; + autoAuthorize?: number; + db?: string; + payload: Binary; +} + +/** + * Generate the finishing command document for authentication. Will be a + * saslStart or saslContinue depending on the presence of a conversation id. + */ +export function finishCommandDocument(token: string, conversationId?: number): OIDCCommand { + if (conversationId != null) { + return { + saslContinue: 1, + conversationId: conversationId, + payload: new Binary(BSON.serialize({ jwt: token })) + }; + } + // saslContinue requires a conversationId in the command to be valid so in this + // case the server allows "step two" to actually be a saslStart with the token + // as the jwt since the use of the cached value has no correlating conversating + // on the particular connection. + return { + saslStart: 1, + mechanism: AuthMechanism.MONGODB_OIDC, + payload: new Binary(BSON.serialize({ jwt: token })) + }; +} + +/** + * Generate the saslStart command document. 
+ */ +export function startCommandDocument(credentials: MongoCredentials): OIDCCommand { + const payload: Document = {}; + if (credentials.username) { + payload.n = credentials.username; + } + return { + saslStart: 1, + autoAuthorize: 1, + mechanism: AuthMechanism.MONGODB_OIDC, + payload: new Binary(BSON.serialize(payload)) + }; +} diff --git a/src/cmap/auth/mongodb_oidc/gcp_machine_workflow.ts b/src/cmap/auth/mongodb_oidc/gcp_machine_workflow.ts new file mode 100644 index 00000000000..6b8c1ee0541 --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/gcp_machine_workflow.ts @@ -0,0 +1,53 @@ +import { MongoGCPError } from '../../../error'; +import { get } from '../../../utils'; +import { type MongoCredentials } from '../mongo_credentials'; +import { type AccessToken, MachineWorkflow } from './machine_workflow'; +import { type TokenCache } from './token_cache'; + +/** GCP base URL. */ +const GCP_BASE_URL = + 'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity'; + +/** GCP request headers. */ +const GCP_HEADERS = Object.freeze({ 'Metadata-Flavor': 'Google' }); + +/** Error for when the token audience is missing in the environment. */ +const TOKEN_RESOURCE_MISSING_ERROR = + 'TOKEN_RESOURCE must be set in the auth mechanism properties when ENVIRONMENT is gcp.'; + +export class GCPMachineWorkflow extends MachineWorkflow { + /** + * Instantiate the machine workflow. + */ + constructor(cache: TokenCache) { + super(cache); + } + + /** + * Get the token from the environment. + */ + async getToken(credentials?: MongoCredentials): Promise { + const tokenAudience = credentials?.mechanismProperties.TOKEN_RESOURCE; + if (!tokenAudience) { + throw new MongoGCPError(TOKEN_RESOURCE_MISSING_ERROR); + } + return await getGcpTokenData(tokenAudience); + } +} + +/** + * Hit the GCP endpoint to get the token data. 
+ */ +async function getGcpTokenData(tokenAudience: string): Promise { + const url = new URL(GCP_BASE_URL); + url.searchParams.append('audience', tokenAudience); + const response = await get(url, { + headers: GCP_HEADERS + }); + if (response.status !== 200) { + throw new MongoGCPError( + `Status code ${response.status} returned from the GCP endpoint. Response body: ${response.body}` + ); + } + return { access_token: response.body }; +} diff --git a/src/cmap/auth/mongodb_oidc/human_callback_workflow.ts b/src/cmap/auth/mongodb_oidc/human_callback_workflow.ts new file mode 100644 index 00000000000..13ac81a6be5 --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/human_callback_workflow.ts @@ -0,0 +1,142 @@ +import { BSON } from 'bson'; + +import { MONGODB_ERROR_CODES, MongoError, MongoOIDCError } from '../../../error'; +import { Timeout, TimeoutError } from '../../../timeout'; +import { type Connection } from '../../connection'; +import { type MongoCredentials } from '../mongo_credentials'; +import { + type IdPInfo, + OIDC_VERSION, + type OIDCCallbackFunction, + type OIDCCallbackParams, + type OIDCResponse +} from '../mongodb_oidc'; +import { CallbackWorkflow, HUMAN_TIMEOUT_MS } from './callback_workflow'; +import { type TokenCache } from './token_cache'; + +/** + * Class implementing behaviour for the non human callback workflow. + * @internal + */ +export class HumanCallbackWorkflow extends CallbackWorkflow { + /** + * Instantiate the human callback workflow. + */ + constructor(cache: TokenCache, callback: OIDCCallbackFunction) { + super(cache, callback); + } + + /** + * Execute the OIDC human callback workflow. + */ + async execute(connection: Connection, credentials: MongoCredentials): Promise { + // Check if the Client Cache has an access token. + // If it does, cache the access token in the Connection Cache and perform a One-Step SASL conversation + // using the access token. 
If the server returns an Authentication error (18), + // invalidate the access token token from the Client Cache, clear the Connection Cache, + // and restart the authentication flow. Raise any other errors to the user. On success, exit the algorithm. + if (this.cache.hasAccessToken) { + const token = this.cache.getAccessToken(); + connection.accessToken = token; + try { + return await this.finishAuthentication(connection, credentials, token); + } catch (error) { + if ( + error instanceof MongoError && + error.code === MONGODB_ERROR_CODES.AuthenticationFailed + ) { + this.cache.removeAccessToken(); + delete connection.accessToken; + return await this.execute(connection, credentials); + } else { + throw error; + } + } + } + // Check if the Client Cache has a refresh token. + // If it does, call the OIDC Human Callback with the cached refresh token and IdpInfo to get a + // new access token. Cache the new access token in the Client Cache and Connection Cache. + // Perform a One-Step SASL conversation using the new access token. If the the server returns + // an Authentication error (18), clear the refresh token, invalidate the access token from the + // Client Cache, clear the Connection Cache, and restart the authentication flow. Raise any other + // errors to the user. On success, exit the algorithm. 
+ if (this.cache.hasRefreshToken) { + const refreshToken = this.cache.getRefreshToken(); + const result = await this.fetchAccessToken( + this.cache.getIdpInfo(), + credentials, + refreshToken + ); + this.cache.put(result); + connection.accessToken = result.accessToken; + try { + return await this.finishAuthentication(connection, credentials, result.accessToken); + } catch (error) { + if ( + error instanceof MongoError && + error.code === MONGODB_ERROR_CODES.AuthenticationFailed + ) { + this.cache.removeRefreshToken(); + delete connection.accessToken; + return await this.execute(connection, credentials); + } else { + throw error; + } + } + } + + // Start a new Two-Step SASL conversation. + // Run a PrincipalStepRequest to get the IdpInfo. + // Call the OIDC Human Callback with the new IdpInfo to get a new access token and optional refresh + // token. Drivers MUST NOT pass a cached refresh token to the callback when performing + // a new Two-Step conversation. Cache the new IdpInfo and refresh token in the Client Cache and the + // new access token in the Client Cache and Connection Cache. + // Attempt to authenticate using a JwtStepRequest with the new access token. Raise any errors to the user. + const startResponse = await this.startAuthentication(connection, credentials); + const conversationId = startResponse.conversationId; + const idpInfo = BSON.deserialize(startResponse.payload.buffer) as IdPInfo; + const callbackResponse = await this.fetchAccessToken(idpInfo, credentials); + this.cache.put(callbackResponse, idpInfo); + connection.accessToken = callbackResponse.accessToken; + return await this.finishAuthentication( + connection, + credentials, + callbackResponse.accessToken, + conversationId + ); + } + + /** + * Fetches an access token using the callback. 
+ */ + private async fetchAccessToken( + idpInfo: IdPInfo, + credentials: MongoCredentials, + refreshToken?: string + ): Promise { + const controller = new AbortController(); + const params: OIDCCallbackParams = { + timeoutContext: controller.signal, + version: OIDC_VERSION, + idpInfo: idpInfo + }; + if (credentials.username) { + params.username = credentials.username; + } + if (refreshToken) { + params.refreshToken = refreshToken; + } + const timeout = Timeout.expires(HUMAN_TIMEOUT_MS); + try { + return await Promise.race([this.executeAndValidateCallback(params), timeout]); + } catch (error) { + if (TimeoutError.is(error)) { + controller.abort(); + throw new MongoOIDCError(`OIDC callback timed out after ${HUMAN_TIMEOUT_MS}ms.`); + } + throw error; + } finally { + timeout.clear(); + } + } +} diff --git a/src/cmap/auth/mongodb_oidc/machine_workflow.ts b/src/cmap/auth/mongodb_oidc/machine_workflow.ts new file mode 100644 index 00000000000..b7cbc8ab2e1 --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/machine_workflow.ts @@ -0,0 +1,137 @@ +import { type Document } from 'bson'; +import { setTimeout } from 'timers/promises'; + +import { ns } from '../../../utils'; +import type { Connection } from '../../connection'; +import type { MongoCredentials } from '../mongo_credentials'; +import type { Workflow } from '../mongodb_oidc'; +import { finishCommandDocument } from './command_builders'; +import { type TokenCache } from './token_cache'; + +/** The time to throttle callback calls. */ +const THROTTLE_MS = 100; + +/** + * The access token format. + * @internal + */ +export interface AccessToken { + access_token: string; + expires_in?: number; +} + +/** @internal */ +export type OIDCTokenFunction = (credentials: MongoCredentials) => Promise; + +/** + * Common behaviour for OIDC machine workflows. 
+ * @internal + */ +export abstract class MachineWorkflow implements Workflow { + cache: TokenCache; + callback: OIDCTokenFunction; + lastExecutionTime: number; + + /** + * Instantiate the machine workflow. + */ + constructor(cache: TokenCache) { + this.cache = cache; + this.callback = this.withLock(this.getToken.bind(this)); + this.lastExecutionTime = Date.now() - THROTTLE_MS; + } + + /** + * Execute the workflow. Gets the token from the subclass implementation. + */ + async execute(connection: Connection, credentials: MongoCredentials): Promise { + const token = await this.getTokenFromCacheOrEnv(connection, credentials); + const command = finishCommandDocument(token); + await connection.command(ns(credentials.source), command, undefined); + } + + /** + * Reauthenticate on a machine workflow just grabs the token again since the server + * has said the current access token is invalid or expired. + */ + async reauthenticate(connection: Connection, credentials: MongoCredentials): Promise { + if (this.cache.hasAccessToken) { + // Reauthentication implies the token has expired. + if (connection.accessToken === this.cache.getAccessToken()) { + // If connection's access token is the same as the cache's, remove + // the token from the cache and connection. + this.cache.removeAccessToken(); + delete connection.accessToken; + } else { + // If the connection's access token is different from the cache's, set + // the cache's token on the connection and do not remove from the + // cache. + connection.accessToken = this.cache.getAccessToken(); + } + } + await this.execute(connection, credentials); + } + + /** + * Get the document to add for speculative authentication. + */ + async speculativeAuth(connection: Connection, credentials: MongoCredentials): Promise { + // The spec states only cached access tokens can use speculative auth. 
+ if (!this.cache.hasAccessToken) { + return {}; + } + const token = await this.getTokenFromCacheOrEnv(connection, credentials); + const document = finishCommandDocument(token); + document.db = credentials.source; + return { speculativeAuthenticate: document }; + } + + /** + * Get the token from the cache or environment. + */ + private async getTokenFromCacheOrEnv( + connection: Connection, + credentials: MongoCredentials + ): Promise { + if (this.cache.hasAccessToken) { + return this.cache.getAccessToken(); + } else { + const token = await this.callback(credentials); + this.cache.put({ accessToken: token.access_token, expiresInSeconds: token.expires_in }); + // Put the access token on the connection as well. + connection.accessToken = token.access_token; + return token.access_token; + } + } + + /** + * Ensure the callback is only executed one at a time, and throttled to + * only once per 100ms. + */ + private withLock(callback: OIDCTokenFunction): OIDCTokenFunction { + let lock: Promise = Promise.resolve(); + return async (credentials: MongoCredentials): Promise => { + // We do this to ensure that we would never return the result of the + // previous lock, only the current callback's value would get returned. + await lock; + lock = lock + // eslint-disable-next-line github/no-then + .catch(() => null) + // eslint-disable-next-line github/no-then + .then(async () => { + const difference = Date.now() - this.lastExecutionTime; + if (difference <= THROTTLE_MS) { + await setTimeout(THROTTLE_MS - difference); + } + this.lastExecutionTime = Date.now(); + return await callback(credentials); + }); + return await lock; + }; + } + + /** + * Get the token from the environment or endpoint. 
+ */ + abstract getToken(credentials: MongoCredentials): Promise; +} diff --git a/src/cmap/auth/mongodb_oidc/service_workflow.ts b/src/cmap/auth/mongodb_oidc/service_workflow.ts deleted file mode 100644 index dcf086b8071..00000000000 --- a/src/cmap/auth/mongodb_oidc/service_workflow.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { BSON, type Document } from 'bson'; - -import { ns } from '../../../utils'; -import type { Connection } from '../../connection'; -import type { MongoCredentials } from '../mongo_credentials'; -import type { Workflow } from '../mongodb_oidc'; -import { AuthMechanism } from '../providers'; - -/** - * Common behaviour for OIDC device workflows. - * @internal - */ -export abstract class ServiceWorkflow implements Workflow { - /** - * Execute the workflow. Looks for AWS_WEB_IDENTITY_TOKEN_FILE in the environment - * and then attempts to read the token from that path. - */ - async execute(connection: Connection, credentials: MongoCredentials): Promise { - const token = await this.getToken(credentials); - const command = commandDocument(token); - return await connection.command(ns(credentials.source), command, undefined); - } - - /** - * Get the document to add for speculative authentication. - */ - async speculativeAuth(credentials: MongoCredentials): Promise { - const token = await this.getToken(credentials); - const document = commandDocument(token); - document.db = credentials.source; - return { speculativeAuthenticate: document }; - } - - /** - * Get the token from the environment or endpoint. - */ - abstract getToken(credentials: MongoCredentials): Promise; -} - -/** - * Create the saslStart command document. 
- */ -export function commandDocument(token: string): Document { - return { - saslStart: 1, - mechanism: AuthMechanism.MONGODB_OIDC, - payload: BSON.serialize({ jwt: token }) - }; -} diff --git a/src/cmap/auth/mongodb_oidc/token_cache.ts b/src/cmap/auth/mongodb_oidc/token_cache.ts new file mode 100644 index 00000000000..e2f7ad4954b --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/token_cache.ts @@ -0,0 +1,62 @@ +import { MongoDriverError } from '../../../error'; +import type { IdPInfo, OIDCResponse } from '../mongodb_oidc'; + +class MongoOIDCError extends MongoDriverError {} + +/** @internal */ +export class TokenCache { + private accessToken?: string; + private refreshToken?: string; + private idpInfo?: IdPInfo; + private expiresInSeconds?: number; + + get hasAccessToken(): boolean { + return !!this.accessToken; + } + + get hasRefreshToken(): boolean { + return !!this.refreshToken; + } + + get hasIdpInfo(): boolean { + return !!this.idpInfo; + } + + getAccessToken(): string { + if (!this.accessToken) { + throw new MongoOIDCError('Attempted to get an access token when none exists.'); + } + return this.accessToken; + } + + getRefreshToken(): string { + if (!this.refreshToken) { + throw new MongoOIDCError('Attempted to get a refresh token when none exists.'); + } + return this.refreshToken; + } + + getIdpInfo(): IdPInfo { + if (!this.idpInfo) { + throw new MongoOIDCError('Attempted to get IDP information when none exists.'); + } + return this.idpInfo; + } + + put(response: OIDCResponse, idpInfo?: IdPInfo) { + this.accessToken = response.accessToken; + this.refreshToken = response.refreshToken; + this.expiresInSeconds = response.expiresInSeconds; + if (idpInfo) { + this.idpInfo = idpInfo; + } + } + + removeAccessToken() { + this.accessToken = undefined; + } + + removeRefreshToken() { + this.refreshToken = undefined; + } +} diff --git a/src/cmap/auth/mongodb_oidc/token_entry_cache.ts b/src/cmap/auth/mongodb_oidc/token_entry_cache.ts deleted file mode 100644 index 
1b5b9de3314..00000000000 --- a/src/cmap/auth/mongodb_oidc/token_entry_cache.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type { IdPServerInfo, IdPServerResponse } from '../mongodb_oidc'; -import { Cache, ExpiringCacheEntry } from './cache'; - -/* Default expiration is now for when no expiration provided */ -const DEFAULT_EXPIRATION_SECS = 0; - -/** @internal */ -export class TokenEntry extends ExpiringCacheEntry { - tokenResult: IdPServerResponse; - serverInfo: IdPServerInfo; - - /** - * Instantiate the entry. - */ - constructor(tokenResult: IdPServerResponse, serverInfo: IdPServerInfo, expiration: number) { - super(expiration); - this.tokenResult = tokenResult; - this.serverInfo = serverInfo; - } -} - -/** - * Cache of OIDC token entries. - * @internal - */ -export class TokenEntryCache extends Cache { - /** - * Set an entry in the token cache. - */ - addEntry( - address: string, - username: string, - callbackHash: string, - tokenResult: IdPServerResponse, - serverInfo: IdPServerInfo - ): TokenEntry { - const entry = new TokenEntry( - tokenResult, - serverInfo, - tokenResult.expiresInSeconds ?? DEFAULT_EXPIRATION_SECS - ); - this.entries.set(this.cacheKey(address, username, callbackHash), entry); - return entry; - } - - /** - * Delete an entry from the cache. - */ - deleteEntry(address: string, username: string, callbackHash: string): void { - this.entries.delete(this.cacheKey(address, username, callbackHash)); - } - - /** - * Get an entry from the cache. - */ - getEntry(address: string, username: string, callbackHash: string): TokenEntry | undefined { - return this.entries.get(this.cacheKey(address, username, callbackHash)); - } - - /** - * Delete all expired entries from the cache. - */ - deleteExpiredEntries(): void { - for (const [key, entry] of this.entries) { - if (!entry.isValid()) { - this.entries.delete(key); - } - } - } - - /** - * Create a cache key from the address and username. 
- */ - cacheKey(address: string, username: string, callbackHash: string): string { - return this.hashedCacheKey(address, username, callbackHash); - } -} diff --git a/src/cmap/auth/mongodb_oidc/token_machine_workflow.ts b/src/cmap/auth/mongodb_oidc/token_machine_workflow.ts new file mode 100644 index 00000000000..de32c469594 --- /dev/null +++ b/src/cmap/auth/mongodb_oidc/token_machine_workflow.ts @@ -0,0 +1,34 @@ +import * as fs from 'fs'; + +import { MongoAWSError } from '../../../error'; +import { type AccessToken, MachineWorkflow } from './machine_workflow'; +import { type TokenCache } from './token_cache'; + +/** Error for when the token is missing in the environment. */ +const TOKEN_MISSING_ERROR = 'OIDC_TOKEN_FILE must be set in the environment.'; + +/** + * Device workflow implementation for AWS. + * + * @internal + */ +export class TokenMachineWorkflow extends MachineWorkflow { + /** + * Instantiate the machine workflow. + */ + constructor(cache: TokenCache) { + super(cache); + } + + /** + * Get the token from the environment. 
+ */ + async getToken(): Promise { + const tokenFile = process.env.OIDC_TOKEN_FILE; + if (!tokenFile) { + throw new MongoAWSError(TOKEN_MISSING_ERROR); + } + const token = await fs.promises.readFile(tokenFile, 'utf8'); + return { access_token: token }; + } +} diff --git a/src/cmap/auth/providers.ts b/src/cmap/auth/providers.ts index d01c06324bb..74e3638ecc5 100644 --- a/src/cmap/auth/providers.ts +++ b/src/cmap/auth/providers.ts @@ -8,7 +8,6 @@ export const AuthMechanism = Object.freeze({ MONGODB_SCRAM_SHA1: 'SCRAM-SHA-1', MONGODB_SCRAM_SHA256: 'SCRAM-SHA-256', MONGODB_X509: 'MONGODB-X509', - /** @experimental */ MONGODB_OIDC: 'MONGODB-OIDC' } as const); diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index abc530f8805..e319dbbed9b 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -91,7 +91,10 @@ export async function performInitialHandshake( if (credentials) { if ( !(credentials.mechanism === AuthMechanism.MONGODB_DEFAULT) && - !options.authProviders.getOrCreateProvider(credentials.mechanism) + !options.authProviders.getOrCreateProvider( + credentials.mechanism, + credentials.mechanismProperties + ) ) { throw new MongoInvalidArgumentError(`AuthMechanism '${credentials.mechanism}' not supported`); } @@ -146,7 +149,10 @@ export async function performInitialHandshake( authContext.response = response; const resolvedCredentials = credentials.resolveAuthMechanism(response); - const provider = options.authProviders.getOrCreateProvider(resolvedCredentials.mechanism); + const provider = options.authProviders.getOrCreateProvider( + resolvedCredentials.mechanism, + resolvedCredentials.mechanismProperties + ); if (!provider) { throw new MongoInvalidArgumentError( `No AuthProvider for ${resolvedCredentials.mechanism} defined.` @@ -218,7 +224,8 @@ export async function prepareHandshakeDocument( handshakeDoc.saslSupportedMechs = `${credentials.source}.${credentials.username}`; const provider = authContext.options.authProviders.getOrCreateProvider( - 
AuthMechanism.MONGODB_SCRAM_SHA256 + AuthMechanism.MONGODB_SCRAM_SHA256, + credentials.mechanismProperties ); if (!provider) { // This auth mechanism is always present. @@ -228,7 +235,10 @@ export async function prepareHandshakeDocument( } return await provider.prepare(handshakeDoc, authContext); } - const provider = authContext.options.authProviders.getOrCreateProvider(credentials.mechanism); + const provider = authContext.options.authProviders.getOrCreateProvider( + credentials.mechanism, + credentials.mechanismProperties + ); if (!provider) { throw new MongoInvalidArgumentError(`No AuthProvider for ${credentials.mechanism} defined.`); } diff --git a/src/cmap/connection.ts b/src/cmap/connection.ts index e1ad9a02935..c6420d8306e 100644 --- a/src/cmap/connection.ts +++ b/src/cmap/connection.ts @@ -174,6 +174,7 @@ export class Connection extends TypedEventEmitter { public authContext?: AuthContext; public delayedTimeoutId: NodeJS.Timeout | null = null; public generation: number; + public accessToken?: string; public readonly description: Readonly; /** * Represents if the connection has been established: diff --git a/src/cmap/connection_pool.ts b/src/cmap/connection_pool.ts index 7c271e8a97f..f91e1361f65 100644 --- a/src/cmap/connection_pool.ts +++ b/src/cmap/connection_pool.ts @@ -551,7 +551,8 @@ export class ConnectionPool extends TypedEventEmitter { const resolvedCredentials = credentials.resolveAuthMechanism(connection.hello); const provider = this[kServer].topology.client.s.authProviders.getOrCreateProvider( - resolvedCredentials.mechanism + resolvedCredentials.mechanism, + resolvedCredentials.mechanismProperties ); if (!provider) { diff --git a/src/connection_string.ts b/src/connection_string.ts index c2abf08aaac..b0becafac05 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -698,6 +698,9 @@ export const OPTIONS = { }); } }, + // Note that if the authMechanismProperties contain a TOKEN_RESOURCE that has a + // comma in it, it MUST be 
supplied as a MongoClient option instead of in the + // connection string. authMechanismProperties: { target: 'credentials', transform({ options, values }): MongoCredentials { diff --git a/src/error.ts b/src/error.ts index 28c269af6be..294062e3d1c 100644 --- a/src/error.ts +++ b/src/error.ts @@ -36,6 +36,7 @@ export const NODE_IS_RECOVERING_ERROR_MESSAGE = new RegExp('node is recovering', export const MONGODB_ERROR_CODES = Object.freeze({ HostUnreachable: 6, HostNotFound: 7, + AuthenticationFailed: 18, NetworkTimeout: 89, ShutdownInProgress: 91, PrimarySteppedDown: 189, @@ -529,6 +530,34 @@ export class MongoAWSError extends MongoRuntimeError { } } +/** + * A error generated when the user attempts to authenticate + * via OIDC callbacks, but fails. + * + * @public + * @category Error + */ +export class MongoOIDCError extends MongoRuntimeError { + /** + * **Do not use this constructor!** + * + * Meant for internal use only. + * + * @remarks + * This class is only meant to be constructed within the driver. This constructor is + * not subject to semantic versioning compatibility guarantees and may change at any time. + * + * @public + **/ + constructor(message: string) { + super(message); + } + + override get name(): string { + return 'MongoOIDCError'; + } +} + /** * A error generated when the user attempts to authenticate * via Azure, but fails. @@ -536,7 +565,7 @@ export class MongoAWSError extends MongoRuntimeError { * @public * @category Error */ -export class MongoAzureError extends MongoRuntimeError { +export class MongoAzureError extends MongoOIDCError { /** * **Do not use this constructor!** * @@ -557,6 +586,34 @@ export class MongoAzureError extends MongoRuntimeError { } } +/** + * A error generated when the user attempts to authenticate + * via GCP, but fails. + * + * @public + * @category Error + */ +export class MongoGCPError extends MongoOIDCError { + /** + * **Do not use this constructor!** + * + * Meant for internal use only. 
+ * + * @remarks + * This class is only meant to be constructed within the driver. This constructor is + * not subject to semantic versioning compatibility guarantees and may change at any time. + * + * @public + **/ + constructor(message: string) { + super(message); + } + + override get name(): string { + return 'MongoGCPError'; + } +} + /** * An error generated when a ChangeStream operation fails to execute. * diff --git a/src/index.ts b/src/index.ts index 812d045ba6a..7c0bfdf841d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -52,6 +52,7 @@ export { MongoDriverError, MongoError, MongoExpiredSessionError, + MongoGCPError, MongoGridFSChunkError, MongoGridFSStreamError, MongoInvalidArgumentError, @@ -61,6 +62,7 @@ export { MongoNetworkError, MongoNetworkTimeoutError, MongoNotConnectedError, + MongoOIDCError, MongoParseError, MongoRuntimeError, MongoServerClosedError, @@ -250,12 +252,14 @@ export type { MongoCredentialsOptions } from './cmap/auth/mongo_credentials'; export type { - IdPServerInfo, + IdPInfo, IdPServerResponse, - OIDCCallbackContext, - OIDCRefreshFunction, - OIDCRequestFunction + OIDCCallbackFunction, + OIDCCallbackParams, + OIDCResponse } from './cmap/auth/mongodb_oidc'; +export type { Workflow } from './cmap/auth/mongodb_oidc'; +export type { TokenCache } from './cmap/auth/mongodb_oidc/token_cache'; export type { MessageHeader, OpCompressedRequest, diff --git a/src/mongo_client.ts b/src/mongo_client.ts index 1e21aefe35a..aee241076f9 100644 --- a/src/mongo_client.ts +++ b/src/mongo_client.ts @@ -10,6 +10,7 @@ import { DEFAULT_ALLOWED_HOSTS, type MongoCredentials } from './cmap/auth/mongo_credentials'; +import { type TokenCache } from './cmap/auth/mongodb_oidc/token_cache'; import { AuthMechanism } from './cmap/auth/providers'; import type { LEGAL_TCP_SOCKET_OPTIONS, LEGAL_TLS_SOCKET_OPTIONS } from './cmap/connect'; import type { Connection } from './cmap/connection'; @@ -524,7 +525,7 @@ export class MongoClient extends TypedEventEmitter { if 
(options.credentials?.mechanism === AuthMechanism.MONGODB_OIDC) { const allowedHosts = options.credentials?.mechanismProperties?.ALLOWED_HOSTS || DEFAULT_ALLOWED_HOSTS; - const isServiceAuth = !!options.credentials?.mechanismProperties?.PROVIDER_NAME; + const isServiceAuth = !!options.credentials?.mechanismProperties?.ENVIRONMENT; if (!isServiceAuth) { for (const host of options.hosts) { if (!hostMatchesWildcards(host.toHostPort().host, allowedHosts)) { @@ -828,6 +829,8 @@ export interface MongoOptions extendedMetadata: Promise; /** @internal */ autoEncrypter?: AutoEncrypter; + /** @internal */ + tokenCache?: TokenCache; proxyHost?: string; proxyPort?: number; proxyUsername?: string; diff --git a/src/mongo_client_auth_providers.ts b/src/mongo_client_auth_providers.ts index 557783c4e17..7b2b66698dc 100644 --- a/src/mongo_client_auth_providers.ts +++ b/src/mongo_client_auth_providers.ts @@ -1,8 +1,12 @@ import { type AuthProvider } from './cmap/auth/auth_provider'; import { GSSAPI } from './cmap/auth/gssapi'; +import { type AuthMechanismProperties } from './cmap/auth/mongo_credentials'; import { MongoCR } from './cmap/auth/mongocr'; import { MongoDBAWS } from './cmap/auth/mongodb_aws'; -import { MongoDBOIDC } from './cmap/auth/mongodb_oidc'; +import { MongoDBOIDC, OIDC_WORKFLOWS, type Workflow } from './cmap/auth/mongodb_oidc'; +import { AutomatedCallbackWorkflow } from './cmap/auth/mongodb_oidc/automated_callback_workflow'; +import { HumanCallbackWorkflow } from './cmap/auth/mongodb_oidc/human_callback_workflow'; +import { TokenCache } from './cmap/auth/mongodb_oidc/token_cache'; import { Plain } from './cmap/auth/plain'; import { AuthMechanism } from './cmap/auth/providers'; import { ScramSHA1, ScramSHA256 } from './cmap/auth/scram'; @@ -10,11 +14,11 @@ import { X509 } from './cmap/auth/x509'; import { MongoInvalidArgumentError } from './error'; /** @internal */ -const AUTH_PROVIDERS = new Map AuthProvider>([ +const AUTH_PROVIDERS = new Map AuthProvider>([ 
[AuthMechanism.MONGODB_AWS, () => new MongoDBAWS()], [AuthMechanism.MONGODB_CR, () => new MongoCR()], [AuthMechanism.MONGODB_GSSAPI, () => new GSSAPI()], - [AuthMechanism.MONGODB_OIDC, () => new MongoDBOIDC()], + [AuthMechanism.MONGODB_OIDC, (workflow?: Workflow) => new MongoDBOIDC(workflow)], [AuthMechanism.MONGODB_PLAIN, () => new Plain()], [AuthMechanism.MONGODB_SCRAM_SHA1, () => new ScramSHA1()], [AuthMechanism.MONGODB_SCRAM_SHA256, () => new ScramSHA256()], @@ -33,22 +37,56 @@ export class MongoClientAuthProviders { * Get or create an authentication provider based on the provided mechanism. * We don't want to create all providers at once, as some providers may not be used. * @param name - The name of the provider to get or create. + * @param credentials - The credentials. * @returns The provider. * @throws MongoInvalidArgumentError if the mechanism is not supported. * @internal */ - getOrCreateProvider(name: AuthMechanism | string): AuthProvider { + getOrCreateProvider( + name: AuthMechanism | string, + authMechanismProperties: AuthMechanismProperties + ): AuthProvider { const authProvider = this.existingProviders.get(name); if (authProvider) { return authProvider; } - const provider = AUTH_PROVIDERS.get(name)?.(); - if (!provider) { + const providerFunction = AUTH_PROVIDERS.get(name); + if (!providerFunction) { throw new MongoInvalidArgumentError(`authMechanism ${name} not supported`); } + let provider; + if (name === AuthMechanism.MONGODB_OIDC) { + provider = providerFunction(this.getWorkflow(authMechanismProperties)); + } else { + provider = providerFunction(); + } + this.existingProviders.set(name, provider); return provider; } + + /** + * Gets either a device workflow or callback workflow. 
+ */ + getWorkflow(authMechanismProperties: AuthMechanismProperties): Workflow { + if (authMechanismProperties.OIDC_HUMAN_CALLBACK) { + return new HumanCallbackWorkflow( + new TokenCache(), + authMechanismProperties.OIDC_HUMAN_CALLBACK + ); + } else if (authMechanismProperties.OIDC_CALLBACK) { + return new AutomatedCallbackWorkflow(new TokenCache(), authMechanismProperties.OIDC_CALLBACK); + } else { + const environment = authMechanismProperties.ENVIRONMENT; + const workflow = OIDC_WORKFLOWS.get(environment)?.(); + if (!workflow) { + throw new MongoInvalidArgumentError( + `Could not load workflow for environment ${authMechanismProperties.ENVIRONMENT}` + ); + } + return workflow; + } + } } diff --git a/src/sdam/server_description.ts b/src/sdam/server_description.ts index 5068931b6a2..c95f7cc9056 100644 --- a/src/sdam/server_description.ts +++ b/src/sdam/server_description.ts @@ -258,10 +258,19 @@ export function compareTopologyVersion( } // TODO(NODE-2674): Preserve int64 sent from MongoDB - const currentCounter = Long.isLong(currentTv.counter) - ? currentTv.counter - : Long.fromNumber(currentTv.counter); - const newCounter = Long.isLong(newTv.counter) ? newTv.counter : Long.fromNumber(newTv.counter); + const currentCounter = + typeof currentTv.counter === 'bigint' + ? Long.fromBigInt(currentTv.counter) + : Long.isLong(currentTv.counter) + ? currentTv.counter + : Long.fromNumber(currentTv.counter); + + const newCounter = + typeof newTv.counter === 'bigint' + ? Long.fromBigInt(newTv.counter) + : Long.isLong(newTv.counter) + ? newTv.counter + : Long.fromNumber(newTv.counter); return currentCounter.compare(newCounter); } diff --git a/src/sessions.ts b/src/sessions.ts index b766d40e288..20699d24ac4 100644 --- a/src/sessions.ts +++ b/src/sessions.ts @@ -369,6 +369,11 @@ export class ClientSession extends TypedEventEmitter { /** * Starts a new transaction with the given options. 
* + * @remarks + * **IMPORTANT**: Running operations in parallel is not supported during a transaction. The use of `Promise.all`, + * `Promise.allSettled`, `Promise.race`, etc to parallelize operations inside a transaction is + * undefined behaviour. + * * @param options - Options for the transaction */ startTransaction(options?: TransactionOptions): void { @@ -443,6 +448,11 @@ export class ClientSession extends TypedEventEmitter { * * **IMPORTANT:** This method requires the function passed in to return a Promise. That promise must be made by `await`-ing all operations in such a way that rejections are propagated to the returned promise. * + * **IMPORTANT:** Running operations in parallel is not supported during a transaction. The use of `Promise.all`, + * `Promise.allSettled`, `Promise.race`, etc to parallelize operations inside a transaction is + * undefined behaviour. + * + * * @remarks * - If all operations successfully complete and the `commitTransaction` operation is successful, then the provided function will return the result of the provided function. * - If the transaction is unable to complete or an error is thrown from within the provided function, then the provided function will throw an error. diff --git a/src/utils.ts b/src/utils.ts index 57079b1f639..2ede778258d 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -3,6 +3,7 @@ import type { SrvRecord } from 'dns'; import { type EventEmitter } from 'events'; import { promises as fs } from 'fs'; import * as http from 'http'; +import { clearTimeout, setTimeout } from 'timers'; import * as url from 'url'; import { URL } from 'url'; import { promisify } from 'util'; @@ -1157,6 +1158,38 @@ interface RequestOptions { headers?: http.OutgoingHttpHeaders; } +/** + * Perform a get request that returns status and body. 
+ * @internal + */ +export function get( + url: URL | string, + options: http.RequestOptions = {} +): Promise<{ body: string; status: number | undefined }> { + return new Promise((resolve, reject) => { + /* eslint-disable prefer-const */ + let timeoutId: NodeJS.Timeout; + const request = http + .get(url, options, response => { + response.setEncoding('utf8'); + let body = ''; + response.on('data', chunk => (body += chunk)); + response.on('end', () => { + clearTimeout(timeoutId); + resolve({ status: response.statusCode, body }); + }); + }) + .on('error', error => { + clearTimeout(timeoutId); + reject(error); + }) + .end(); + timeoutId = setTimeout(() => { + request.destroy(new MongoNetworkTimeoutError(`request timed out after 10 seconds`)); + }, 10000); + }); +} + export async function request(uri: string): Promise>; export async function request( uri: string, diff --git a/test/integration/auth/mongodb_oidc.prose.test.ts b/test/integration/auth/mongodb_oidc.prose.test.ts new file mode 100644 index 00000000000..b2acf89e577 --- /dev/null +++ b/test/integration/auth/mongodb_oidc.prose.test.ts @@ -0,0 +1,1389 @@ +import { readFile } from 'node:fs/promises'; +import * as path from 'node:path'; + +import { expect } from 'chai'; +import * as sinon from 'sinon'; + +import { + type Collection, + MongoClient, + type MongoDBOIDC, + type OIDCCallbackParams, + type OIDCResponse +} from '../../mongodb'; + +const createCallback = (tokenFile = 'test_user1', expiresInSeconds?: number, extraFields?: any) => { + return async (params: OIDCCallbackParams) => { + const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, tokenFile), { + encoding: 'utf8' + }); + // Assert the correct properties are set. + expect(params).to.have.property('timeoutContext'); + expect(params).to.have.property('version'); + return generateResult(token, expiresInSeconds, extraFields); + }; +}; + +// Generates the result the request or refresh callback returns. 
+const generateResult = (token: string, expiresInSeconds?: number, extraFields?: any) => { + const response: OIDCResponse = { accessToken: token, refreshToken: token }; + if (expiresInSeconds) { + response.expiresInSeconds = expiresInSeconds; + } + if (extraFields) { + return { ...response, ...extraFields }; + } + return response; +}; + +describe('OIDC Auth Spec Tests', function () { + beforeEach(function () { + if (process.env.ENVIRONMENT !== 'test') { + this.skipReason = 'GCP OIDC prose tests require a Test OIDC environment.'; + this.skip(); + } + }); + + describe('Machine Authentication Flow Prose Tests', function () { + const uriSingle = process.env.MONGODB_URI_SINGLE; + + describe('1. Callback Authentication', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('1.1 Callback is called during authentication', function () { + const callbackSpy = sinon.spy(createCallback('test_machine')); + // Create an OIDC configured client. + // Perform a find operation that succeeds. + // Assert that the callback was called 1 time. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + + describe('1.2 Callback is called once for multiple connections', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client. + // Start 10 threads and run 100 find operations in each thread that all succeed. + // Assert that the callback was called 1 time. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + }); + + it('only calls the callback once', async function () { + for (let i = 0; i < 100; i++) { + await collection.findOne(); + } + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + }); + + describe('2. OIDC Callback Validation', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('2.1 Valid Callback Inputs', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with an OIDC callback that validates its inputs and returns a valid access token. + // Perform a find operation that succeeds. + // Assert that the OIDC callback was called with the appropriate inputs, including the timeout parameter if possible. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + // IdpInfo can change, so we assert we called once and validate existence in the callback itself. + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + + describe('2.2 OIDC Callback Returns Null', function () { + const callbackSpy = sinon.spy(() => null); + // Create an OIDC configured client with an OIDC callback that returns null. + // Perform a find operation that fails. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + + describe('2.3 OIDC Callback Returns Missing Data', function () { + const callbackSpy = sinon.spy(() => { + return { field: 'value' }; + }); + // Create an OIDC configured client with an OIDC callback that returns data not conforming to the OIDCCredential with missing fields. + // Perform a find operation that fails. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + + describe('2.4 Invalid Client Configuration with Callback', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with an OIDC callback and auth mechanism property ENVIRONMENT:test. + // Assert it returns a client configuration error. + it('fails validation', async function () { + try { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy, + ENVIRONMENT: 'test' + }, + retryReads: false + }); + } catch (error) { + expect(error).to.exist; + } + }); + }); + }); + + describe('3. 
Authentication Failure', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('3.1 Authentication failure with cached tokens fetch a new token and retry auth', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client. + // Poison the Client Cache with an invalid access token. + // Perform a find operation that succeeds. + // Assert that the callback was called 1 time. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + const provider = client.s.authProviders.getOrCreateProvider('MONGODB-OIDC', { + OIDC_CALLBACK: callbackSpy + }) as MongoDBOIDC; + provider.workflow.cache.put({ idpServerResponse: { accessToken: 'bad' } }); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + + describe('3.2 Authentication failures without cached tokens return an error', function () { + const callbackSpy = sinon.spy(() => { + return { accessToken: 'bad' }; + }); + // Create an OIDC configured client with an OIDC callback that always returns invalid access tokens. + // Perform a find operation that fails. + // Assert that the callback was called 1 time. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + const provider = client.s.authProviders.getOrCreateProvider('MONGODB-OIDC', { + OIDC_CALLBACK: callbackSpy + }) as MongoDBOIDC; + provider.workflow.cache.put({ idpServerResponse: { accessToken: 'bad' } }); + collection = client.db('test').collection('test'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + + describe('3.3 Unexpected error code does not clear the cache', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create a MongoClient with a callback that returns a valid token. + // Set a fail point for saslStart commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "saslStart" + // ], + // errorCode: 20 // IllegalOperation + // } + // } + // Perform a find operation that fails. + // Assert that the callback has been called once. + // Perform a find operation that succeeds. + // Assert that the callback has been called once. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['saslStart'], + errorCode: 20 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates the second time', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + expect(callbackSpy).to.have.been.calledOnce; + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + }); + }); + }); + + describe('4. Reauthentication', function () { + let client: MongoClient; + let collection: Collection; + let callbackCount = 0; + + afterEach(async function () { + callbackCount = 0; + await client?.close(); + }); + + const createBadCallback = () => { + return async () => { + if (callbackCount === 0) { + const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, 'test_user1'), { + encoding: 'utf8' + }); + callbackCount++; + return generateResult(token); + } + return generateResult('bad'); + }; + }; + + describe('4.1 Reauthentication Succeeds', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client. 
+ // Set a fail point for find commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform a find operation that succeeds. + // Assert that the callback was called 2 times (once during the connection handshake, and again during reauthentication). + // Close the client. + beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledTwice; + }); + }); + + describe('4.2 Read Commands Fail If Reauthentication Fails', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createBadCallback()); + // Create a MongoClient whose OIDC callback returns one good token and then bad tokens after the first call. + // Perform a find operation that succeeds. + // Set a fail point for find commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform a find operation that fails. + // Assert that the callback was called 2 times. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + expect(callbackSpy).to.have.been.calledTwice; + }); + }); + + describe('4.3 Write Commands Fail If Reauthentication Fails', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createBadCallback()); + // Create a MongoClient whose OIDC callback returns one good token and then bad tokens after the first call. + // Perform an insert operation that succeeds. + // Set a fail point for insert commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "insert" + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform an insert operation that fails. + // Assert that the callback was called 2 times. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('test'); + await collection.insertOne({ n: 1 }); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['insert'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.insertOne({ n: 2 }).catch(error => error); + expect(error).to.exist; + expect(callbackSpy).to.have.been.calledTwice; + }); + }); + }); + }); + + describe('Human Authentication Flow Prose Tests', function () { + const uriSingle = process.env.MONGODB_URI_SINGLE; + const uriMulti = process.env.MONGODB_URI_MULTI; + + describe('1. OIDC Human Callback Authentication', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('1.1 Single Principal Implicit Username', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client. + // Perform a find operation that succeeds. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('1.2 Single Principal Explicit Username', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with MONGODB_URI_SINGLE and a username of test_user1@${OIDC_DOMAIN}. + // Perform a find operation that succeeds. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + auth: { + username: `test_user1@${process.env.OIDC_DOMAIN}`, + password: undefined + }, + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('1.3 Multiple Principal User 1', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with MONGODB_URI_MULTI and username of test_user1@${OIDC_DOMAIN}. + // Perform a find operation that succeeds. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriMulti, { + auth: { + username: `test_user1@${process.env.OIDC_DOMAIN}`, + password: undefined + }, + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('1.4 Multiple Principal User 2', function () { + const callbackSpy = sinon.spy(createCallback('test_user2')); + // Create an OIDC configured client with MONGODB_URI_MULTI and username of test_user2@${OIDC_DOMAIN}. that reads the test_user2 token file. + // Perform a find operation that succeeds. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriMulti, { + auth: { + username: `test_user2@${process.env.OIDC_DOMAIN}`, + password: undefined + }, + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('1.5 Multiple Principal No User', function () { + const callbackSpy = sinon.spy(createCallback(null)); + // Create an OIDC configured client with MONGODB_URI_MULTI and no username. + // Assert that a find operation fails. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriMulti, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + + describe('1.6 Allowed Hosts Blocked', function () { + context('when provided an empty ALLOWED_HOSTS', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with an ALLOWED_HOSTS that is an empty list. + // Assert that a find operation fails with a client-side error. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy, + ALLOWED_HOSTS: [] + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + + context('when provided invalid ALLOWED_HOSTS', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create a client that uses the URL mongodb://localhost/?authMechanism=MONGODB-OIDC&ignored=example.com, + // a human callback, and an ALLOWED_HOSTS that contains ["example.com"]. + // Assert that a find operation fails with a client-side error. + // Close the client. + // NOTE: For Node we remove the ignored=example.com URI option as we error on unrecognised options. 
+ beforeEach(function () { + client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy, + ALLOWED_HOSTS: ['example.com'] + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + }); + + describe('1.7 Allowed Hosts in Connection String Ignored', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with the connection string: + // mongodb+srv://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=ALLOWED_HOSTS:%5B%22example.com%22%5D and a Human Callback. + // Assert that the creation of the client raises a configuration error. + it('fails on client creation', async function () { + expect(() => { + new MongoClient( + `${uriSingle}&authMechanismProperties=ALLOWED_HOSTS:%5B%22example.com%22%5D`, + { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + } + } + ); + }).to.throw(); + }); + }); + + describe('1.8 Machine IdP with Human Callback', function () { + const callbackSpy = sinon.spy(createCallback('test_machine')); + // This test MUST only be run when OIDC_IS_LOCAL is set. This indicates that the server is local and not using Atlas. + // In this case, MONGODB_URI_SINGLE will be configured with a human user test_user1, and a machine user test_machine. + // This test uses the machine user with a human callback, ensuring that the missing clientId in the PrincipalStepRequest + // response is handled by the driver. + // Create an OIDC configured client with MONGODB_URI_SINGLE and a username of test_machine that uses the test_machine token. + // Perform a find operation that succeeds. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + auth: { + username: `test_machine`, + password: undefined + }, + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + }); + + describe('2. OIDC Human Callback Validation', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('2.1 Valid Callback Inputs', function () { + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with a human callback that validates its inputs and returns a valid access token. + // Perform a find operation that succeeds. Verify that the human callback was called with the appropriate inputs, including the timeout parameter if possible. + // Close the client. + beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('2.2 Human Callback Returns Missing Data', function () { + const callbackSpy = sinon.spy(() => { + return { field: 'value' }; + }); + // Create an OIDC configured client with a human callback that returns data not conforming to the OIDCCredential with missing fields. + // Perform a find operation that fails. + // Close the client. 
+ beforeEach(function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + + describe('2.3 Refresh Token Is Passed To The Callback', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create a MongoClient with a human callback that checks for the presence of a refresh token. + // Perform a find operation that succeeds. + // Set a fail point for find commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // errorCode: 391 + // } + // } + // Perform a find operation that succeeds. + // Assert that the callback has been called twice. + // Assert that the refresh token was provided to the callback once. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await collection.findOne(); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledTwice; + expect(callbackSpy.lastCall.firstArg.refreshToken).to.not.be.null; + }); + }); + }); + + describe('3. Speculative Authentication', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('3.1 Uses speculative authentication if there is a cached token', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with a human callback that returns a valid token. + // Set a fail point for find commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // closeConnection: true + // } + // } + // Perform a find operation that fails. + // Set a fail point for saslStart commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "saslStart" + // ], + // errorCode: 18 + // } + // } + // Perform a find operation that succeeds. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + closeConnection: true + } + }); + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['saslStart'], + errorCode: 18 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.be.null; + }); + }); + + describe('3.2 Does not use speculative authentication if there is no cached token', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with a human callback that returns a valid token. + // Set a fail point for saslStart commands of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "saslStart" + // ], + // errorCode: 18 + // } + // } + // Perform a find operation that fails. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 2 + }, + data: { + failCommands: ['saslStart'], + errorCode: 18 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + }); + }); + }); + + describe('4. Reauthentication', function () { + let client: MongoClient; + let collection: Collection; + + afterEach(async function () { + await client?.close(); + }); + + describe('4.1 Succeeds', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + const commandStartedEvents = []; + const commandSucceededEvents = []; + const commandFailedEvents = []; + // Create an OIDC configured client and add an event listener. The following assumes that the driver + // does not emit saslStart or saslContinue events. If the driver does emit those events, ignore/filter + // them for the purposes of this test. + // Perform a find operation that succeeds. + // Assert that the human callback has been called once. + // Clear the listener state if possible. 
+ // Force a reauthenication using a fail point of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform another find operation that succeeds. + // Assert that the human callback has been called twice. + // Assert that the ordering of list started events is [find], , find. Note that if the listener stat could + // not be cleared then there will and be extra find command. + // Assert that the list of command succeeded events is [find]. + // Assert that a find operation failed once during the command execution. + // Close the client. + beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + monitorCommands: true, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + client.on('commandStarted', event => { + if (event.commandName === 'find') commandStartedEvents.push(event.commandName); + }); + client.on('commandSucceeded', event => { + if (event.commandName === 'find') commandSucceededEvents.push(event.commandName); + }); + client.on('commandFailed', event => { + if (event.commandName === 'find') commandFailedEvents.push(event.commandName); + }); + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + await 
collection.findOne(); + expect(callbackSpy).to.have.been.calledTwice; + expect(commandStartedEvents).to.deep.equal(['find', 'find']); + expect(commandSucceededEvents).to.deep.equal(['find']); + expect(commandFailedEvents).to.deep.equal(['find']); + }); + }); + + describe('4.2 Succeeds no refresh', function () { + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createCallback()); + // Create an OIDC configured client with a human callback that does not return a refresh token. + // Perform a find operation that succeeds. + // Assert that the human callback has been called once. + // Force a reauthenication using a fail point of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find" + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform a find operation that succeeds. + // Assert that the human callback has been called twice. + // Close the client. + beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + monitorCommands: true, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledTwice; + }); + }); + + describe('4.3 Succeeds after refresh fails', 
function () { + const createBadCallback = () => { + return async () => { + const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, 'test_user1'), { + encoding: 'utf8' + }); + return generateResult(token, 10000, { refreshToken: 'bad' }); + }; + }; + + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createBadCallback()); + // Create an OIDC configured client with a callback that returns the test_user1 access token and a bad refresh token. + // Perform a find operation that succeeds. + // Assert that the human callback has been called once. + // Force a reauthenication using a fail point of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find", + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform a find operation that succeeds. + // Assert that the human callback has been called 2 times. + // Close the client. + beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + monitorCommands: true, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('successfully authenticates', async function () { + await collection.findOne(); + expect(callbackSpy).to.have.been.calledTwice; + }); + }); + + describe('4.4 Fails', function () { + let accessCount = 0; + + const 
createBadCallback = () => { + return async () => { + let token; + if (accessCount === 0) { + token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, 'test_user1'), { + encoding: 'utf8' + }); + } else { + token = 'bad'; + } + accessCount++; + return generateResult(token, 10000, { refreshToken: 'bad' }); + }; + }; + + let utilClient: MongoClient; + const callbackSpy = sinon.spy(createBadCallback()); + // Create an OIDC configured client that returns invalid refresh tokens and returns invalid access tokens after the first access. + // Perform a find operation that succeeds. + // Assert that the human callback has been called once. + // Force a reauthenication using a failCommand of the form: + // { + // configureFailPoint: "failCommand", + // mode: { + // times: 1 + // }, + // data: { + // failCommands: [ + // "find", + // ], + // errorCode: 391 // ReauthenticationRequired + // } + // } + // Perform a find operation that fails. + // Assert that the human callback has been called three times. + // Close the client. 
+ beforeEach(async function () { + client = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: callbackSpy + }, + monitorCommands: true, + retryReads: false + }); + utilClient = new MongoClient(uriSingle, { + authMechanismProperties: { + OIDC_HUMAN_CALLBACK: createCallback() + }, + retryReads: false + }); + collection = client.db('test').collection('testHuman'); + await collection.findOne(); + expect(callbackSpy).to.have.been.calledOnce; + await utilClient + .db() + .admin() + .command({ + configureFailPoint: 'failCommand', + mode: { + times: 1 + }, + data: { + failCommands: ['find'], + errorCode: 391 + } + }); + }); + + afterEach(async function () { + await utilClient.db().admin().command({ + configureFailPoint: 'failCommand', + mode: 'off' + }); + await utilClient.close(); + }); + + it('does not successfully authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error).to.exist; + expect(callbackSpy).to.have.been.calledThrice; + }); + }); + }); + }); +}); diff --git a/test/integration/auth/mongodb_oidc_azure.prose.05.test.ts b/test/integration/auth/mongodb_oidc_azure.prose.05.test.ts new file mode 100644 index 00000000000..847678537e4 --- /dev/null +++ b/test/integration/auth/mongodb_oidc_azure.prose.05.test.ts @@ -0,0 +1,85 @@ +import { expect } from 'chai'; + +import { type Collection, MongoClient, type MongoClientOptions } from '../../mongodb'; + +const DEFAULT_URI = 'mongodb://127.0.0.1:27017'; + +describe('OIDC Auth Spec Azure Tests', function () { + describe('5. 
Azure Tests', function () { + let client: MongoClient; + let collection: Collection; + + beforeEach(function () { + if (!this.configuration.isOIDC(process.env.MONGODB_URI_SINGLE, 'azure')) { + this.skipReason = 'Azure OIDC tests require an Azure OIDC environment.'; + this.skip(); + } + }); + + afterEach(async function () { + await client?.close(); + }); + + describe('5.1 Azure With No Username', function () { + // Create an OIDC configured client with ENVIRONMENT:azure and a valid TOKEN_RESOURCE and no username. + // Perform a find operation that succeeds. + // Close the client. + beforeEach(function () { + const options: MongoClientOptions = {}; + if (process.env.AZUREOIDC_RESOURCE) { + options.authMechanismProperties = { TOKEN_RESOURCE: process.env.AZUREOIDC_RESOURCE }; + } + client = new MongoClient(process.env.MONGODB_URI_SINGLE ?? DEFAULT_URI, options); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.not.be.null; + }); + }); + + describe('5.2 Azure With Bad Username', function () { + // Create an OIDC configured client with ENVIRONMENT:azure and a valid TOKEN_RESOURCE and a username of "bad". + // Perform a find operation that fails. + // Close the client. + beforeEach(function () { + const options: MongoClientOptions = {}; + if (process.env.AZUREOIDC_USERNAME) { + options.auth = { username: 'bad', password: undefined }; + } + if (process.env.AZUREOIDC_RESOURCE) { + options.authMechanismProperties = { TOKEN_RESOURCE: process.env.AZUREOIDC_RESOURCE }; + } + client = new MongoClient(process.env.MONGODB_URI_SINGLE ?? 
DEFAULT_URI, options); + collection = client.db('test').collection('test'); + }); + + it('does not authenticate', async function () { + const error = await collection.findOne().catch(error => error); + expect(error.message).to.include('Azure endpoint'); + }); + }); + + describe('5.3 Azure With Valid Username', function () { + // This prose test does not exist in the spec but the new OIDC setup scripts + // have a username in the environment so worth testing. + beforeEach(function () { + const options: MongoClientOptions = {}; + if (process.env.AZUREOIDC_USERNAME) { + options.auth = { username: process.env.AZUREOIDC_USERNAME, password: undefined }; + } + if (process.env.AZUREOIDC_RESOURCE) { + options.authMechanismProperties = { TOKEN_RESOURCE: process.env.AZUREOIDC_RESOURCE }; + } + client = new MongoClient(process.env.MONGODB_URI_SINGLE ?? DEFAULT_URI, options); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.not.be.null; + }); + }); + }); +}); diff --git a/test/integration/auth/mongodb_oidc_azure.prose.test.ts b/test/integration/auth/mongodb_oidc_azure.prose.test.ts deleted file mode 100644 index 2dc95b4c935..00000000000 --- a/test/integration/auth/mongodb_oidc_azure.prose.test.ts +++ /dev/null @@ -1,209 +0,0 @@ -import { expect } from 'chai'; - -import { - type Collection, - type CommandFailedEvent, - type CommandStartedEvent, - type CommandSucceededEvent, - type MongoClient, - OIDC_WORKFLOWS -} from '../../mongodb'; - -describe('OIDC Auth Spec Prose Tests', function () { - const callbackCache = OIDC_WORKFLOWS.get('callback').cache; - const azureCache = OIDC_WORKFLOWS.get('azure').cache; - - describe('3. 
Azure Automatic Auth', function () { - let client: MongoClient; - let collection: Collection; - - beforeEach(function () { - if (!this.configuration.isAzureOIDC(process.env.MONGODB_URI)) { - this.skipReason = 'Azure OIDC prose tests require an Azure OIDC environment.'; - this.skip(); - } - }); - - afterEach(async function () { - await client?.close(); - }); - - describe('3.1 Connect', function () { - beforeEach(function () { - client = this.configuration.newClient(process.env.MONGODB_URI); - collection = client.db('test').collection('test'); - }); - - // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:azure,TOKEN_AUDIENCE:. - // Assert that a find operation succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('3.2 Allowed Hosts Ignored', function () { - beforeEach(function () { - client = this.configuration.newClient(process.env.MONGODB_URI, { - authMechanismProperties: { - ALLOWED_HOSTS: [] - } - }); - collection = client.db('test').collection('test'); - }); - - // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:azure,TOKEN_AUDIENCE:, - // and an ALLOWED_HOSTS that is an empty list. - // Assert that a find operation succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('3.3 Main Cache Not Used', function () { - beforeEach(function () { - callbackCache?.clear(); - client = this.configuration.newClient(process.env.MONGODB_URI); - collection = client.db('test').collection('test'); - }); - - // Clear the main OIDC cache. 
- // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:azure,TOKEN_AUDIENCE:. - // Assert that a find operation succeeds. - // Close the client. - // Assert that the main OIDC cache is empty. - it('does not use the main callback cache', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - expect(callbackCache.entries).to.be.empty; - }); - }); - - describe('3.4 Azure Cache is Used', function () { - beforeEach(function () { - callbackCache?.clear(); - azureCache?.clear(); - client = this.configuration.newClient(process.env.MONGODB_URI); - collection = client.db('test').collection('test'); - }); - - // Clear the Azure OIDC cache. - // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:azure,TOKEN_AUDIENCE:. - // Assert that a find operation succeeds. - // Close the client. - // Assert that the Azure OIDC cache has one entry. 
- it('uses the Azure OIDC cache', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - expect(callbackCache.entries).to.be.empty; - expect(azureCache.entries.size).to.equal(1); - }); - }); - - describe('3.5 Reauthentication Succeeds', function () { - const commandStartedEvents: CommandStartedEvent[] = []; - const commandSucceededEvents: CommandSucceededEvent[] = []; - const commandFailedEvents: CommandFailedEvent[] = []; - - const commandStartedListener = event => { - if (event.commandName === 'find') { - commandStartedEvents.push(event); - } - }; - const commandSucceededListener = event => { - if (event.commandName === 'find') { - commandSucceededEvents.push(event); - } - }; - const commandFailedListener = event => { - if (event.commandName === 'find') { - commandFailedEvents.push(event); - } - }; - - const addListeners = () => { - client.on('commandStarted', commandStartedListener); - client.on('commandSucceeded', commandSucceededListener); - client.on('commandFailed', commandFailedListener); - }; - - // Sets up the fail point for the find to reauthenticate. - const setupFailPoint = async () => { - return await client - .db() - .admin() - .command({ - configureFailPoint: 'failCommand', - mode: { - times: 1 - }, - data: { - failCommands: ['find'], - errorCode: 391 - } - }); - }; - - // Removes the fail point. - const removeFailPoint = async () => { - return await client.db().admin().command({ - configureFailPoint: 'failCommand', - mode: 'off' - }); - }; - - beforeEach(async function () { - azureCache?.clear(); - client = this.configuration.newClient(process.env.MONGODB_URI, { monitorCommands: true }); - await client.db('test').collection('test').findOne(); - addListeners(); - await setupFailPoint(); - }); - - afterEach(async function () { - await removeFailPoint(); - }); - - // Clear the Azure OIDC cache. - // Create a client with an event listener. 
The following assumes that the driver does not emit saslStart or saslContinue events. If the driver does emit those events, ignore/filter them for the purposes of this test. - // Perform a find operation that succeeds. - // Clear the listener state if possible. - // Force a reauthenication using a failCommand of the form: - // - // { - // "configureFailPoint": "failCommand", - // "mode": { - // "times": 1 - // }, - // "data": { - // "failCommands": [ - // "find" - // ], - // "errorCode": 391 - // } - // } - // - //Note - // - //the driver MUST either use a unique appName or explicitly remove the failCommand after the test to prevent leakage. - // - //Perform another find operation that succeeds. - //Assert that the ordering of list started events is [find], , find. Note that if the listener stat could not be cleared then there will and be extra find command. - //Assert that the list of command succeeded events is [find]. - //Assert that a find operation failed once during the command execution. - //Close the client. 
- it('successfully reauthenticates', async function () { - await client.db('test').collection('test').findOne(); - expect(commandStartedEvents.map(event => event.commandName)).to.deep.equal([ - 'find', - 'find' - ]); - expect(commandSucceededEvents.map(event => event.commandName)).to.deep.equal(['find']); - expect(commandFailedEvents.map(event => event.commandName)).to.deep.equal(['find']); - }); - }); - }); -}); diff --git a/test/integration/auth/mongodb_oidc_gcp.prose.06.test.ts b/test/integration/auth/mongodb_oidc_gcp.prose.06.test.ts new file mode 100644 index 00000000000..42b36e7f279 --- /dev/null +++ b/test/integration/auth/mongodb_oidc_gcp.prose.06.test.ts @@ -0,0 +1,54 @@ +import { expect } from 'chai'; + +import { type Collection, MongoClient, type MongoClientOptions } from '../../mongodb'; + +const DEFAULT_URI = 'mongodb://127.0.0.1:27017'; + +describe('OIDC Auth Spec GCP Tests', function () { + // Note there is no spec or tests for GCP yet, these are 2 scenarios based on the + // drivers tools scripts available. + describe('6. GCP Tests', function () { + let client: MongoClient; + let collection: Collection; + + beforeEach(function () { + if (!this.configuration.isOIDC(process.env.MONGODB_URI_SINGLE, 'gcp')) { + this.skipReason = 'GCP OIDC prose tests require a GCP OIDC environment.'; + this.skip(); + } + }); + + afterEach(async function () { + await client?.close(); + }); + + describe('6.1 GCP With Valid Token Resource', function () { + beforeEach(function () { + const options: MongoClientOptions = {}; + if (process.env.GCPOIDC_AUDIENCE) { + options.authMechanismProperties = { TOKEN_RESOURCE: process.env.GCPOIDC_AUDIENCE }; + } + client = new MongoClient(process.env.MONGODB_URI_SINGLE ?? 
DEFAULT_URI, options); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.not.be.null; + }); + }); + + describe('6.2 GCP With Invalid Token Resource', function () { + beforeEach(function () { + const options: MongoClientOptions = { authMechanismProperties: { TOKEN_RESOURCE: 'bad' } }; + client = new MongoClient(process.env.MONGODB_URI_SINGLE ?? DEFAULT_URI, options); + collection = client.db('test').collection('test'); + }); + + it('successfully authenticates', async function () { + const result = await collection.findOne(); + expect(result).to.not.be.null; + }); + }); + }); +}); diff --git a/test/manual/mongodb_oidc.prose.test.ts b/test/manual/mongodb_oidc.prose.test.ts deleted file mode 100644 index bb4cfcb671f..00000000000 --- a/test/manual/mongodb_oidc.prose.test.ts +++ /dev/null @@ -1,1230 +0,0 @@ -import { readFile } from 'node:fs/promises'; -import * as path from 'node:path'; - -import { expect } from 'chai'; -import * as sinon from 'sinon'; - -import { - type Collection, - type CommandFailedEvent, - type CommandStartedEvent, - type CommandSucceededEvent, - type IdPServerInfo, - MongoClient, - MongoInvalidArgumentError, - MongoMissingCredentialsError, - MongoServerError, - OIDC_WORKFLOWS, - type OIDCCallbackContext -} from '../mongodb'; -import { sleep } from '../tools/utils'; - -describe('MONGODB-OIDC', function () { - context('when running in the environment', function () { - it('contains AWS_WEB_IDENTITY_TOKEN_FILE', function () { - expect(process.env).to.have.property('AWS_WEB_IDENTITY_TOKEN_FILE'); - }); - }); - - describe('OIDC Auth Spec Prose Tests', function () { - // Set up the cache variable. - const cache = OIDC_WORKFLOWS.get('callback').cache; - const callbackCache = OIDC_WORKFLOWS.get('callback').callbackCache; - // Creates a request function for use in the test. 
- const createRequestCallback = ( - username = 'test_user1', - expiresInSeconds?: number, - extraFields?: any - ) => { - return async (info: IdPServerInfo, context: OIDCCallbackContext) => { - const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, username), { - encoding: 'utf8' - }); - // Do some basic property assertions. - expect(context).to.have.property('timeoutSeconds'); - expect(info).to.have.property('issuer'); - expect(info).to.have.property('clientId'); - return generateResult(token, expiresInSeconds, extraFields); - }; - }; - - // Creates a refresh function for use in the test. - const createRefreshCallback = ( - username = 'test_user1', - expiresInSeconds?: number, - extraFields?: any - ) => { - return async (info: IdPServerInfo, context: OIDCCallbackContext) => { - const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, username), { - encoding: 'utf8' - }); - // Do some basic property assertions. - expect(context).to.have.property('timeoutSeconds'); - expect(info).to.have.property('issuer'); - expect(info).to.have.property('clientId'); - return generateResult(token, expiresInSeconds, extraFields); - }; - }; - - // Generates the result the request or refresh callback returns. - const generateResult = (token: string, expiresInSeconds?: number, extraFields?: any) => { - const response: OIDCRequestTokenResult = { accessToken: token }; - if (expiresInSeconds) { - response.expiresInSeconds = expiresInSeconds; - } - if (extraFields) { - return { ...response, ...extraFields }; - } - return response; - }; - - beforeEach(function () { - callbackCache.clear(); - }); - - describe('1. 
Callback-Driven Auth', function () { - let client: MongoClient; - let collection: Collection; - - beforeEach(function () { - cache.clear(); - }); - - afterEach(async function () { - await client?.close(); - }); - - describe('1.1 Single Principal Implicit Username', function () { - before(function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback() - } - }); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a request callback returns a valid token. - // Create a client that uses the default OIDC url and the request callback. - // Perform a find operation. that succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('1.2 Single Principal Explicit Username', function () { - before(function () { - client = new MongoClient('mongodb://test_user1@localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback() - } - }); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a request callback that returns a valid token. - // Create a client with a url of the form mongodb://test_user1@localhost/?authMechanism=MONGODB-OIDC and the OIDC request callback. - // Perform a find operation that succeeds. - // Close the client. 
- it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('1.3 Multiple Principal User 1', function () { - before(function () { - client = new MongoClient( - 'mongodb://test_user1@localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred', - { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback() - } - } - ); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a request callback that returns a valid token. - // Create a client with a url of the form mongodb://test_user1@localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred and a valid OIDC request callback. - // Perform a find operation that succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('1.4 Multiple Principal User 2', function () { - before(function () { - client = new MongoClient( - 'mongodb://test_user2@localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred', - { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user2') - } - } - ); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a request callback that reads in the generated test_user2 token file. - // Create a client with a url of the form mongodb://test_user2@localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred and a valid OIDC request callback. - // Perform a find operation that succeeds. - // Close the client. 
- it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('1.5 Multiple Principal No User', function () { - before(function () { - client = new MongoClient( - 'mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred', - { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback() - } - } - ); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a client with a url of the form mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&directConnection=true&readPreference=secondaryPreferred and a valid OIDC request callback. - // Assert that a find operation fails. - // Close the client. - it('fails authentication', async function () { - try { - await collection.findOne(); - expect.fail('Expected OIDC auth to fail with no user provided'); - } catch (e) { - expect(e).to.be.instanceOf(MongoServerError); - expect(e.message).to.include('Authentication failed'); - } - }); - }); - - describe('1.6 Allowed Hosts Blocked', function () { - before(function () { - cache.clear(); - }); - - // Clear the cache. - // Create a client that uses the OIDC url and a request callback, and an - // ``ALLOWED_HOSTS`` that is an empty list. - // Assert that a ``find`` operation fails with a client-side error. - // Close the client. 
- context('when ALLOWED_HOSTS is empty', function () { - before(function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - ALLOWED_HOSTS: [], - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 600) - } - }); - collection = client.db('test').collection('test'); - }); - - it('fails validation', async function () { - const error = await collection.findOne().catch(error => error); - expect(error).to.be.instanceOf(MongoInvalidArgumentError); - expect(error.message).to.include( - 'is not valid for OIDC authentication with ALLOWED_HOSTS' - ); - }); - }); - - // Create a client that uses the url ``mongodb://localhost/?authMechanism=MONGODB-OIDC&ignored=example.com`` a request callback, and an - // ``ALLOWED_HOSTS`` that contains ["example.com"]. - // Assert that a ``find`` operation fails with a client-side error. - // Close the client. - context('when ALLOWED_HOSTS does not match', function () { - beforeEach(function () { - this.currentTest.skipReason = 'Will fail URI parsing as ignored is not a valid option'; - this.skip(); - // client = new MongoClient( - // 'mongodb://localhost/?authMechanism=MONGODB-OIDC&ignored=example.com', - // { - // authMechanismProperties: { - // ALLOWED_HOSTS: ['example.com'], - // REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 600) - // } - // } - // ); - // collection = client.db('test').collection('test'); - }); - - it('fails validation', async function () { - // try { - // await collection.findOne(); - // } catch (error) { - // expect(error).to.be.instanceOf(MongoInvalidArgumentError); - // expect(error.message).to.include('Host does not match provided ALLOWED_HOSTS values'); - // } - }); - }); - - // Create a client that uses the url ``mongodb://evilmongodb.com`` a request - // callback, and an ``ALLOWED_HOSTS`` that contains ``*mongodb.com``. - // Assert that a ``find`` operation fails with a client-side error. - // Close the client. 
- context('when ALLOWED_HOSTS is invalid', function () { - before(function () { - client = new MongoClient('mongodb://evilmongodb.com/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - ALLOWED_HOSTS: ['*mongodb.com'], - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 600) - } - }); - collection = client.db('test').collection('test'); - }); - - it('fails validation', async function () { - const error = await collection.findOne().catch(error => error); - expect(error).to.be.instanceOf(MongoInvalidArgumentError); - expect(error.message).to.include( - 'is not valid for OIDC authentication with ALLOWED_HOSTS' - ); - }); - }); - }); - - describe('1.7 Lock Avoids Extra Callback Calls', function () { - let requestCounter = 0; - - before(function () { - cache.clear(); - }); - - const requestCallback = async () => { - requestCounter++; - if (requestCounter > 1) { - throw new Error('Request callback was entered simultaneously.'); - } - const token = await readFile(path.join(process.env.OIDC_TOKEN_DIR, 'test_user1'), { - encoding: 'utf8' - }); - await sleep(3000); - requestCounter--; - return generateResult(token, 300); - }; - const refreshCallback = createRefreshCallback(); - const requestSpy = sinon.spy(requestCallback); - const refreshSpy = sinon.spy(refreshCallback); - - const createClient = () => { - return new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: requestSpy, - REFRESH_TOKEN_CALLBACK: refreshSpy - } - }); - }; - - const authenticate = async () => { - const client = createClient(); - await client.db('test').collection('test').findOne(); - await client.close(); - }; - - const testPromise = async () => { - await authenticate(); - await authenticate(); - }; - - // Clear the cache. - // Create a request callback that returns a token that will expire soon, and - // a refresh callback. 
Ensure that the request callback has a time delay, and - // that we can record the number of times each callback is called. - // Spawn two threads that do the following: - // - Create a client with the callbacks. - // - Run a find operation that succeeds. - // - Close the client. - // - Create a new client with the callbacks. - // - Run a find operation that succeeds. - // - Close the client. - // Join the two threads. - // Ensure that the request callback has been called once, and the refresh - // callback has been called twice. - it('does not simultaneously enter a callback', async function () { - await Promise.all([testPromise(), testPromise()]); - // The request callback will get called twice, but will not be entered - // simultaneously. If it does, the function will throw and we'll have - // and exception here. - expect(requestSpy).to.have.been.calledTwice; - expect(refreshSpy).to.have.been.calledTwice; - }); - }); - }); - - describe('2. AWS Automatic Auth', function () { - let client: MongoClient; - let collection: Collection; - - afterEach(async function () { - await client?.close(); - }); - - describe('2.1 Single Principal', function () { - before(function () { - client = new MongoClient( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws' - ); - collection = client.db('test').collection('test'); - }); - - // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws. - // Perform a find operation that succeeds. - // Close the client. 
- it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('2.2 Multiple Principal User 1', function () { - before(function () { - client = new MongoClient( - 'mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws&directConnection=true&readPreference=secondaryPreferred' - ); - collection = client.db('test').collection('test'); - }); - - // Create a client with a url of the form mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws&directConnection=true&readPreference=secondaryPreferred. - // Perform a find operation that succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('2.3 Multiple Principal User 2', function () { - let tokenFile; - - before(function () { - tokenFile = process.env.AWS_WEB_IDENTITY_TOKEN_FILE; - process.env.AWS_WEB_IDENTITY_TOKEN_FILE = path.join( - process.env.OIDC_TOKEN_DIR, - 'test_user2' - ); - client = new MongoClient( - 'mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws&directConnection=true&readPreference=secondaryPreferred' - ); - collection = client.db('test').collection('test'); - }); - - after(function () { - process.env.AWS_WEB_IDENTITY_TOKEN_FILE = tokenFile; - }); - - // Set the AWS_WEB_IDENTITY_TOKEN_FILE environment variable to the location of valid test_user2 credentials. - // Create a client with a url of the form mongodb://localhost:27018/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws&directConnection=true&readPreference=secondaryPreferred. - // Perform a find operation that succeeds. - // Close the client. - // Restore the AWS_WEB_IDENTITY_TOKEN_FILE environment variable to the location of valid test_user2 credentials. 
- it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - - describe('2.4 Allowed Hosts Ignored', function () { - before(function () { - client = new MongoClient( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws', - { - authMechanismProperties: { - ALLOWED_HOSTS: [] - } - } - ); - collection = client.db('test').collection('test'); - }); - - // Create a client with a url of the form mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws, and an ALLOWED_HOSTS that is an empty list. - // Assert that a find operation succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await collection.findOne(); - expect(result).to.be.null; - }); - }); - }); - - describe('3. Callback Validation', function () { - let client: MongoClient; - let collection: Collection; - - afterEach(async function () { - await client?.close(); - }); - - describe('3.1 Valid Callbacks', function () { - const requestSpy = sinon.spy(createRequestCallback('test_user1', 60)); - const refreshSpy = sinon.spy(createRefreshCallback()); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestSpy, - REFRESH_TOKEN_CALLBACK: refreshSpy - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - collection = client.db('test').collection('test'); - await collection.findOne(); - expect(requestSpy).to.have.been.calledOnce; - await client.close(); - }); - - // Clear the cache. - // Create request and refresh callback that validate their inputs and return a valid token. The request callback must return a token that expires in one minute. - // Create a client that uses the above callbacks. - // Perform a find operation that succeeds. 
Verify that the request callback was called with the appropriate inputs, including the timeout parameter if possible. Ensure that there are no unexpected fields. - // Perform another find operation that succeeds. Verify that the refresh callback was called with the appropriate inputs, including the timeout parameter if possible. - // Close the client. - it('successfully authenticates with the request and refresh callbacks', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - collection = client.db('test').collection('test'); - await collection.findOne(); - expect(refreshSpy).to.have.been.calledOnce; - }); - }); - - describe('3.2 Request Callback Returns Null', function () { - before(function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: () => { - return Promise.resolve(null); - } - } - }); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a client with a request callback that returns null. - // Perform a find operation that fails. - // Close the client. 
- it('fails authentication', async function () { - try { - await collection.findOne(); - expect.fail('Expected OIDC auth to fail with null return from request callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - - describe('3.3 Refresh Callback Returns Null', function () { - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 60), - REFRESH_TOKEN_CALLBACK: () => { - return Promise.resolve(null); - } - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - collection = client.db('test').collection('test'); - await collection.findOne(); - await client.close(); - }); - - // Clear the cache. - // Create request callback that returns a valid token that will expire in a minute, and a refresh callback that returns null. - // Perform a find operation that succeeds. - // Perform a find operation that fails. - // Close the client. 
- it('fails authentication on refresh', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - try { - await client.db('test').collection('test').findOne(); - expect.fail('Expected OIDC auth to fail with invlid return from refresh callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - - describe('3.4 Request Callback Returns Invalid Data', function () { - context('when the request callback has missing fields', function () { - before(function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: () => { - return Promise.resolve({}); - } - } - }); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a client with a request callback that returns data not conforming to the OIDCRequestTokenResult with missing field(s). - // Perform a find operation that fails. - // Close the client. 
- it('fails authentication', async function () { - try { - await collection.findOne(); - expect.fail('Expected OIDC auth to fail with invlid return from request callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - - context('when the request callback has extra fields', function () { - before(function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 60, { foo: 'bar' }) - } - }); - collection = client.db('test').collection('test'); - }); - - // Create a client with a request callback that returns data not conforming to the OIDCRequestTokenResult with extra field(s). - // Perform a find operation that fails. - // Close the client. - it('fails authentication', async function () { - try { - await collection.findOne(); - expect.fail('Expected OIDC auth to fail with extra fields from request callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - }); - - describe('3.5 Refresh Callback Returns Missing Data', function () { - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 60), - REFRESH_TOKEN_CALLBACK: () => { - return Promise.resolve({}); - } - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - - // Clear the cache. 
- // Create request callback that returns a valid token that will expire in a minute, and a refresh callback that returns data not conforming to the OIDCRequestTokenResult with missing field(s). - // Create a client with the callbacks. - // Perform a find operation that succeeds. - // Close the client. - // Create a new client with the same callbacks. - // Perform a find operation that fails. - // Close the client. - it('fails authentication on the refresh', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - try { - await client.db('test').collection('test').findOne(); - expect.fail('Expected OIDC auth to fail with missing data from refresh callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - - describe('3.6 Refresh Callback Returns Extra Data', function () { - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 60), - REFRESH_TOKEN_CALLBACK: createRefreshCallback('test_user1', 60, { foo: 'bar' }) - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - - // Clear the cache. - // Create request callback that returns a valid token that will expire in a minute, and a refresh callback that returns data not conforming to the OIDCRequestTokenResult with extra field(s). - // Create a client with the callbacks. - // Perform a find operation that succeeds. - // Close the client. - // Create a new client with the same callbacks. - // Perform a find operation that fails. - // Close the client. 
- it('fails authentication on the refresh', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - try { - await client.db('test').collection('test').findOne(); - expect.fail('Expected OIDC auth to fail with extra fields from refresh callback'); - } catch (e) { - expect(e).to.be.instanceOf(MongoMissingCredentialsError); - expect(e.message).to.include( - 'User provided OIDC callbacks must return a valid object with an accessToken' - ); - } - }); - }); - }); - - describe('4. Cached Credentials', function () { - let client: MongoClient; - let collection: Collection; - - afterEach(async function () { - await client?.close(); - }); - - describe('4.1 Cache with refresh', function () { - const requestCallback = createRequestCallback('test_user1', 60); - const refreshSpy = sinon.spy(createRefreshCallback('test_user1', 60)); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestCallback, - REFRESH_TOKEN_CALLBACK: refreshSpy - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - // Clear the cache. - // Create a new client with a request callback that gives credentials that expire in on minute. - // Ensure that a find operation adds credentials to the cache. - // Close the client. - // Create a new client with the same request callback and a refresh callback. - // Ensure that a find operation results in a call to the refresh callback. - // Close the client. - it('successfully authenticates and calls the refresh callback', async function () { - // Ensure credentials added to the cache. 
- client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - expect(refreshSpy).to.have.been.calledOnce; - }); - }); - - describe('4.2 Cache with no refresh', function () { - const requestSpy = sinon.spy(createRequestCallback('test_user1', 60)); - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: requestSpy - } - }); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - - // Clear the cache. - // Create a new client with a request callback that gives credentials that expire in one minute. - // Ensure that a find operation adds credentials to the cache. - // Close the client. - // Create a new client with the a request callback but no refresh callback. - // Ensure that a find operation results in a call to the request callback. - // Close the client. - it('successfully authenticates and calls only the request callback', async function () { - expect(cache.entries.size).to.equal(1); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: requestSpy - } - }); - await client.db('test').collection('test').findOne(); - expect(requestSpy).to.have.been.calledTwice; - }); - }); - - describe('4.3 Cache key includes callback', function () { - const firstRequestCallback = createRequestCallback('test_user1'); - const secondRequestCallback = createRequestCallback('test_user1'); - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: firstRequestCallback - } - }); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - - // Clear the cache. 
- // Create a new client with a request callback that does not give an `expiresInSeconds` value. - // Ensure that a find operation adds credentials to the cache. - // Close the client. - // Create a new client with a different request callback. - // Ensure that a find operation replaces the one-time entry with a new entry to the cache. - // Close the client. - it('replaces expired entries in the cache', async function () { - expect(cache.entries.size).to.equal(1); - const initialKey = cache.entries.keys().next().value; - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: { - REQUEST_TOKEN_CALLBACK: secondRequestCallback - } - }); - await client.db('test').collection('test').findOne(); - expect(cache.entries.size).to.equal(1); - const newKey = cache.entries.keys().next().value; - expect(newKey).to.not.equal(initialKey); - }); - }); - - describe('4.4 Error clears cache', function () { - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: createRequestCallback('test_user1', 300), - REFRESH_TOKEN_CALLBACK: () => { - return Promise.resolve({}); - } - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - expect(cache.entries.size).to.equal(1); - await client.close(); - }); - - // Clear the cache. - // Create a new client with a valid request callback that gives credentials that expire within 5 minutes and a refresh callback that gives invalid credentials. - // Ensure that a find operation adds a new entry to the cache. - // Ensure that a subsequent find operation results in an error. - // Ensure that the cached token has been cleared. - // Close the client. 
- it('clears the cache on authentication error', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - try { - await client.db('test').collection('test').findOne(); - expect.fail('Expected OIDC auth to fail with invalid fields from refresh callback'); - } catch (error) { - expect(error).to.be.instanceOf(MongoMissingCredentialsError); - expect(error.message).to.include(''); - expect(cache.entries.size).to.equal(0); - } - }); - }); - - describe('4.5 AWS Automatic workflow does not use cache', function () { - before(function () { - cache.clear(); - client = new MongoClient( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws' - ); - collection = client.db('test').collection('test'); - }); - - // Clear the cache. - // Create a new client that uses the AWS automatic workflow. - // Ensure that a find operation does not add credentials to the cache. - // Close the client. - it('authenticates with no cache usage', async function () { - await collection.findOne(); - expect(cache.entries.size).to.equal(0); - }); - }); - }); - - describe('5. Speculative Authentication', function () { - let client: MongoClient; - const requestCallback = createRequestCallback('test_user1', 600); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestCallback - }; - - // Removes the fail point. 
- const removeFailPoint = async () => { - return await client.db().admin().command({ - configureFailPoint: 'failCommand', - mode: 'off' - }); - }; - - // Sets up the fail point for the saslStart - const setupFailPoint = async () => { - return await client - .db() - .admin() - .command({ - configureFailPoint: 'failCommand', - mode: { - times: 2 - }, - data: { - failCommands: ['saslStart'], - errorCode: 18 - } - }); - }; - - afterEach(async function () { - await removeFailPoint(); - await client?.close(); - }); - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await setupFailPoint(); - await client.db('test').collection('test').findOne(); - await client.close(); - }); - - // Clear the cache. - // Create a client with a request callback that returns a valid token that will not expire soon. - // Set a fail point for saslStart commands of the form: - // - // { - // "configureFailPoint": "failCommand", - // "mode": { - // "times": 2 - // }, - // "data": { - // "failCommands": [ - // "saslStart" - // ], - // "errorCode": 18 - // } - // } - // - // Note - // - // The driver MUST either use a unique appName or explicitly remove the failCommand after the test to prevent leakage. - // - // Perform a find operation that succeeds. - // Close the client. - // Create a new client with the same properties without clearing the cache. - // Set a fail point for saslStart commands. - // Perform a find operation that succeeds. - // Close the client. - it('successfully speculative authenticates', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await setupFailPoint(); - const result = await client.db('test').collection('test').findOne(); - expect(result).to.be.null; - }); - }); - - describe('6. 
Reauthentication', function () { - let client: MongoClient; - - // Removes the fail point. - const removeFailPoint = async () => { - return await client.db().admin().command({ - configureFailPoint: 'failCommand', - mode: 'off' - }); - }; - - describe('6.1 Succeeds', function () { - const requestCallback = createRequestCallback('test_user1', 600); - const refreshSpy = sinon.spy(createRefreshCallback('test_user1', 600)); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestCallback, - REFRESH_TOKEN_CALLBACK: refreshSpy - }; - const commandStartedEvents: CommandStartedEvent[] = []; - const commandSucceededEvents: CommandSucceededEvent[] = []; - const commandFailedEvents: CommandFailedEvent[] = []; - - const commandStartedListener = event => { - if (event.commandName === 'find') { - commandStartedEvents.push(event); - } - }; - const commandSucceededListener = event => { - if (event.commandName === 'find') { - commandSucceededEvents.push(event); - } - }; - const commandFailedListener = event => { - if (event.commandName === 'find') { - commandFailedEvents.push(event); - } - }; - - const addListeners = () => { - client.on('commandStarted', commandStartedListener); - client.on('commandSucceeded', commandSucceededListener); - client.on('commandFailed', commandFailedListener); - }; - - // Sets up the fail point for the find to reauthenticate. 
- const setupFailPoint = async () => { - return await client - .db() - .admin() - .command({ - configureFailPoint: 'failCommand', - mode: { - times: 1 - }, - data: { - failCommands: ['find'], - errorCode: 391 - } - }); - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - expect(refreshSpy).to.not.be.called; - client.close(); - }); - - afterEach(async function () { - await removeFailPoint(); - await client.close(); - }); - - // Clear the cache. - // Create request and refresh callbacks that return valid credentials that will not expire soon. - // Create a client with the callbacks and an event listener. The following assumes that the driver does not emit saslStart or saslContinue events. If the driver does emit those events, ignore/filter them for the purposes of this test. - // Perform a find operation that succeeds. - // Assert that the refresh callback has not been called. - // Clear the listener state if possible. - // Force a reauthenication using a failCommand of the form: - // - // { - // "configureFailPoint": "failCommand", - // "mode": { - // "times": 1 - // }, - // "data": { - // "failCommands": [ - // "find" - // ], - // "errorCode": 391 - // } - // } - // - // Note - // - // the driver MUST either use a unique appName or explicitly remove the failCommand after the test to prevent leakage. - // - // Perform another find operation that succeeds. - // Assert that the refresh callback has been called once, if possible. - // Assert that the ordering of list started events is [find], , find. Note that if the listener stat could not be cleared then there will and be extra find command. - // Assert that the list of command succeeded events is [find]. - // Assert that a find operation failed once during the command execution. - // Close the client. 
- it('successfully reauthenticates', async function () { - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties, - monitorCommands: true - }); - addListeners(); - await setupFailPoint(); - await client.db('test').collection('test').findOne(); - expect(refreshSpy).to.have.been.calledOnce; - expect(commandStartedEvents.map(event => event.commandName)).to.deep.equal([ - 'find', - 'find' - ]); - expect(commandSucceededEvents.map(event => event.commandName)).to.deep.equal(['find']); - expect(commandFailedEvents.map(event => event.commandName)).to.deep.equal(['find']); - }); - }); - - describe('6.2 Retries and Succeeds with Cache', function () { - const requestCallback = createRequestCallback('test_user1', 600); - const refreshCallback = createRefreshCallback('test_user1', 600); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestCallback, - REFRESH_TOKEN_CALLBACK: refreshCallback - }; - // Sets up the fail point for the find to reauthenticate. - const setupFailPoint = async () => { - return await client - .db() - .admin() - .command({ - configureFailPoint: 'failCommand', - mode: { - times: 1 - }, - data: { - failCommands: ['find', 'saslStart'], - errorCode: 391 - } - }); - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - await setupFailPoint(); - }); - - afterEach(async function () { - await removeFailPoint(); - await client.close(); - }); - - // Clear the cache. - // Create request and refresh callbacks that return valid credentials that will not expire soon. - // Perform a find operation that succeeds. 
- // Force a reauthenication using a failCommand of the form: - // - // { - // "configureFailPoint": "failCommand", - // "mode": { - // "times": 2 - // }, - // "data": { - // "failCommands": [ - // "find", "saslStart" - // ], - // "errorCode": 391 - // } - // } - // - // Perform a find operation that succeeds. - // Close the client. - it('successfully authenticates', async function () { - const result = await client.db('test').collection('test').findOne(); - expect(result).to.be.null; - }); - }); - - describe('6.3 Retries and Fails with no Cache', function () { - const requestCallback = createRequestCallback('test_user1', 600); - const refreshCallback = createRefreshCallback('test_user1', 600); - const authMechanismProperties = { - REQUEST_TOKEN_CALLBACK: requestCallback, - REFRESH_TOKEN_CALLBACK: refreshCallback - }; - // Sets up the fail point for the find to reauthenticate. - const setupFailPoint = async () => { - return await client - .db() - .admin() - .command({ - configureFailPoint: 'failCommand', - mode: { - times: 2 - }, - data: { - failCommands: ['find', 'saslStart'], - errorCode: 391 - } - }); - }; - - before(async function () { - cache.clear(); - client = new MongoClient('mongodb://localhost/?authMechanism=MONGODB-OIDC', { - authMechanismProperties: authMechanismProperties - }); - await client.db('test').collection('test').findOne(); - cache.clear(); - await setupFailPoint(); - }); - - afterEach(async function () { - await removeFailPoint(); - await client.close(); - }); - - // Clear the cache. - // Create request and refresh callbacks that return valid credentials that will not expire soon. - // Perform a find operation that succeeds (to force a speculative auth). - // Clear the cache. 
- // Force a reauthenication using a failCommand of the form: - // - // { - // "configureFailPoint": "failCommand", - // "mode": { - // "times": 2 - // }, - // "data": { - // "failCommands": [ - // "find", "saslStart" - // ], - // "errorCode": 391 - // } - // } - // - // Perform a find operation that fails. - // Close the client. - it('fails authentication', async function () { - try { - await client.db('test').collection('test').findOne(); - expect.fail('Reauthentication must fail on the saslStart error'); - } catch (error) { - // This is the saslStart failCommand bubbled up. - expect(error).to.be.instanceOf(MongoServerError); - } - }); - }); - }); - }); -}); diff --git a/test/mongodb.ts b/test/mongodb.ts index d6c78208695..2d44f357863 100644 --- a/test/mongodb.ts +++ b/test/mongodb.ts @@ -107,13 +107,11 @@ export * from '../src/cmap/auth/mongo_credentials'; export * from '../src/cmap/auth/mongocr'; export * from '../src/cmap/auth/mongodb_aws'; export * from '../src/cmap/auth/mongodb_oidc'; -export * from '../src/cmap/auth/mongodb_oidc/aws_service_workflow'; -export * from '../src/cmap/auth/mongodb_oidc/azure_service_workflow'; -export * from '../src/cmap/auth/mongodb_oidc/azure_token_cache'; -export * from '../src/cmap/auth/mongodb_oidc/callback_lock_cache'; +export * from '../src/cmap/auth/mongodb_oidc/azure_machine_workflow'; export * from '../src/cmap/auth/mongodb_oidc/callback_workflow'; -export * from '../src/cmap/auth/mongodb_oidc/service_workflow'; -export * from '../src/cmap/auth/mongodb_oidc/token_entry_cache'; +export * from '../src/cmap/auth/mongodb_oidc/gcp_machine_workflow'; +export * from '../src/cmap/auth/mongodb_oidc/machine_workflow'; +export * from '../src/cmap/auth/mongodb_oidc/token_machine_workflow'; export * from '../src/cmap/auth/plain'; export * from '../src/cmap/auth/providers'; export * from '../src/cmap/auth/scram'; diff --git a/test/readme.md b/test/readme.md index 117cf70972b..78230d35858 100644 --- a/test/readme.md +++ 
b/test/readme.md @@ -28,14 +28,14 @@ Below is a summary of the types of test automation in this repo. | Unit | `/test/unit` | The unit tests test individual pieces of code, typically functions. These tests do **not** interact with a real database, so mocks are used instead.

The unit test directory mirrors the `/src` directory structure with test file names matching the source file names of the code they test. | `npm run check:unit` | | Integration | `/test/integration` | The integration tests test that a given feature or piece of a feature is working as expected. These tests do **not** use mocks; instead, they interact with a real database.

The integration test directory follows the `test/spec` directory structure representing the different functional areas of the driver.

**Note:** The `.gitkeep` files are intentionally left to ensure that this directory structure is preserved even as the actual test files are moved around. | `npm run check:test` | | Benchmark | `/test/benchmarks` | The benchmark tests report how long a designated set of tests take to run. They are used to measure performance. | `npm run check:bench` | -| Specialized Environment | `/test/manual` | The specalized environment tests are functional tests that require specialized environment setups in Evergreen.

**Note**: "manual" in the directory path does not refer to tests that should be run manually. These tests are automated. These tests have a special Evergreen configuration and run in isolation from the other tests. | There is no single script for running all of the specialized environment tests. Instead, you can run the appropriate script based on the specialized environment you want to use:
- `npm run check:atlas` to test Atlas
- `npm run check:adl` to test Atlas Data Lake
- `npm run check:ocsp` to test OSCP
- `npm run check:kerberos` to test Kerberos
- `npm run check:tls` to test TLS
- `npm run check:ldap` to test LDAP authorization | +| Specialized Environment | `/test/manual` | The specalized environment tests are functional tests that require specialized environment setups in Evergreen.

**Note**: "manual" in the directory path does not refer to tests that should be run manually. These tests are automated. These tests have a special Evergreen configuration and run in isolation from the other tests. | There is no single script for running all of the specialized environment tests. Instead, you can run the appropriate script based on the specialized environment you want to use:
- `npm run check:atlas` to test Atlas
- `npm run check:adl` to test Atlas Data Lake
- `npm run check:ocsp` to test OCSP
- `npm run check:kerberos` to test Kerberos
- `npm run check:tls` to test TLS
- `npm run check:ldap` to test LDAP authorization | | TypeScript Definition | `/test/types` | The TypeScript definition tests verify the type definitions are correct. | `npm run check:tsd` | -| Github Actions | `/test/action` | Tests that run as Github actions such as dependency checking. | Currently only `npm run check:dependencies` but could be expanded to more in the future. | -| Code Examples | `/test/integration/node-specific/examples` | Code examples that are also paired with tests that show they are working examples. | Currently `npm run check:lambda` to test the AWS Lambda example with default auth and `npm run check:lambda:aws` to test the AWS Lambda example with AWS auth. | +| GitHub Actions | `/test/action` | Tests that run as GitHub Actions such as dependency checking. | Currently, only `npm run check:dependencies` but could be expanded to more in the future. | +| Code Examples | `/test/integration/node-specific/examples` | Code examples that are also paired with tests that show they are working examples. | Currently, `npm run check:lambda` to test the AWS Lambda example with default auth and `npm run check:lambda:aws` to test the AWS Lambda example with AWS auth. | ### Spec Tests -All of the MongoDB drivers follow the same [specifications (specs)][driver-specs]. Each spec has tests associated with it. Some of the tests are prose (written, descriptive) tests, which must be implemented on a case by case basis by the developers on the driver teams. Other tests are written in a standardized form as YAML and converted to JSON, which can be read by the specialized spec test runners that are implemented in each driver. +All of the MongoDB drivers follow the same [specifications (specs)][driver-specs]. Each spec has tests associated with it. Some of the tests are prose (written, descriptive) tests, which must be implemented on a case-by-case basis by the developers on the driver teams. 
Other tests are written in a standardized form as YAML and converted to JSON, which can be read by the specialized spec test runners that are implemented in each driver. The input test specifications are stored in `test/spec`. @@ -45,9 +45,17 @@ The actual implementations of the spec tests can be unit tests or integration te The easiest way to get started running the tests locally is to start a standalone server and run all of the tests. -Start a mongod standalone with our [cluster_setup.sh](tools/cluster_setup.sh) script: `./test/tools/cluster_setup.sh server`. +Start a `mongod` standalone with our [cluster_setup.sh](tools/cluster_setup.sh) script: -Then run the tests: `npm test`. +```sh +./test/tools/cluster_setup.sh server +``` + +Then run the tests: + +```sh +npm test +``` > **Note:** the command above will run a subset of the tests that work with the standalone server topology since the tests are being run against a standalone server. @@ -55,10 +63,10 @@ The output will show how many tests passed, failed, and are pending. Tests that In the following subsections, we'll dig into the details of running the tests. -### Testing With Authorization Enabled +### Testing With Authorization-Enabled -By default, the integration tests run with auth enabled and the cluster_setup.sh script defaults to starting servers with auth enabled. Tests can be run locally without auth by setting the environment -variable `AUTH` to the value of `noauth`. This must be a two step process of starting a server without auth enabled and then running the tests without auth enabled. +By default, the integration tests run with auth-enabled and the `cluster_setup.sh` script defaults to starting servers with auth-enabled. Tests can be run locally without auth by setting the environment +variable `AUTH` to the value of `noauth`. This must be a two-step process of starting a server without auth-enabled and then running the tests without auth-enabled. 
```shell AUTH='noauth' ./test/tools/cluster_setup.sh @@ -68,28 +76,68 @@ AUTH='noauth' npm run check:test As we mentioned earlier, the tests check the topology of the MongoDB server being used and run the tests associated with that topology. Tests that don't have a matching topology will be skipped. -In the steps above, we started a standalone server: `./test/tools/cluster_setup.sh server`. +In the steps above, we started a standalone server: -You can use the same [cluster_setup.sh](tools/cluster_setup.sh) script to start a replica set or sharded cluster by passing the appropriate option: `./test/tools/cluster_setup.sh replica_set` or -`./test/tools/cluster_setup.sh sharded_cluster`. If you are running more than a standalone server, make sure your `ulimit` settings are in accordance with [MongoDB's recommendations][mongodb-ulimit]. Changing the settings on the latest versions of macOS can be tricky. See [this article][macos-ulimt] for tips. (You likely don't need to do the complicated maxproc steps.) +```sh +./test/tools/cluster_setup.sh server +``` -The [cluster_setup.sh](tools/cluster_setup.sh) script automatically stores the files associated with the MongoDB server in the `data` directory, which is stored at the top level of this repository. +You can use the same [cluster_setup.sh](tools/cluster_setup.sh) script to start a replica set or sharded cluster by passing the appropriate option: +```sh +./test/tools/cluster_setup.sh replica_set +``` +or +```sh +./test/tools/cluster_setup.sh sharded_cluster +``` +If you are running more than a standalone server, make sure your `ulimit` settings are in accordance with [MongoDB's recommendations][mongodb-ulimit]. Changing the settings on the latest versions of macOS can be tricky. See [this article][macos-ulimt] for tips. (You likely don't need to do the complicated `maxproc` steps.) 
+ +The [cluster_setup.sh](tools/cluster_setup.sh) script automatically stores the files associated with the MongoDB server in the `data` directory, which is stored at the top-level of this repository. You can delete this directory if you want to ensure you're running a clean configuration. If you delete the directory, the associated database server will be stopped, and you will need to run [cluster_setup.sh](tools/cluster_setup.sh) again. -You can prefix `npm test` with a `MONGODB_URI` environment variable to point the tests to a specific deployment. For example, for a standalone server, you might use: `MONGODB_URI=mongodb://localhost:27017 npm test`. For a replica set, you might use: `MONGODB_URI=mongodb://localhost:31000,localhost:31001,localhost:31002/?replicaSet=rs npm test`. +You can prefix `npm test` with a `MONGODB_URI` environment variable to point the tests to a specific deployment. For example, for a standalone server, you might use: + +```sh +MONGODB_URI=mongodb://localhost:27017 npm test +``` + +For a replica set, you might use: + +```sh +MONGODB_URI=mongodb://localhost:31000,localhost:31001,localhost:31002/?replicaSet=rs npm test +``` ### Running Individual Tests -The easiest way to run a single test is by appending `.only()` to the test context you want to run. For example, you could update a test function to be `it.only(‘cool test’, function() {})`. Then -run the test using `npm run check:test` for a functional or integration test or `npm run check:unit` for a unit test. See [Mocha's documentation][mocha-only] for more detailed information on `.only()`. +The easiest way to run a single test is by appending `.only()` to the test context you want to run. For example, you could update a test function to be: -Another way to run a single test is to use Mocha's `grep` flag. For functional or integration tests, run `npm run check:test -- -g 'test name'`. For unit tests, run `npm run check:unit -- -g 'test name'`. 
See the [Mocha documentation][mocha-grep] for information on the `grep` flag. +```JavaScript +it.only('cool test', function() {}) +``` + +Then, run the test using `npm run check:test` for a functional or integration test or +`npm run check:unit` +for a unit test. See [Mocha's documentation][mocha-only] for more detailed information on `.only()`. + +Another way to run a single test is to use Mocha's `grep` flag. For functional or integration tests, run: +```sh +npm run check:test -- -g 'test name' +``` +For unit tests, run: +```sh +npm run check:unit -- -g 'test name' +``` +See the [Mocha documentation][mocha-grep] for information on the `grep` flag. ## Running the Tests in Evergreen -[Evergreen][evergreen-wiki] is the continuous integration (CI) system we use. Evergreen builds are automatically run whenever a pull request is created or when commits are pushed to particular branches (e.g., main, 4.0, and 3.6). +[Evergreen][evergreen-wiki] is the continuous integration (CI) system we use. Evergreen builds are automatically run whenever a pull request is created or when commits are pushed to particular branches (e.g., `main`, `4.0`, and `3.6`). -Each Evergreen build runs the test suite against a variety of build variants that include a combination of topologies, special environments, and operating systems. By default, commits in pull requests only run a subset of the build variants in order to save time and resources. To configure a build, update `.evergreen/config.yml.in` and then generate a new Evergreen config via `node .evergreen/generate_evergreen_tasks.js`. +Each Evergreen build runs the test suite against a variety of build variants that include a combination of topologies, special environments, and operating systems. By default, commits in pull requests only run a subset of the build variants in order to save time and resources. 
To configure a build, update `.evergreen/config.yml.in` and then generate a new Evergreen config via: + +```sh +node .evergreen/generate_evergreen_tasks.js +``` ### Manually Kicking Off Evergreen Builds @@ -117,11 +165,15 @@ Once you have the Evergreen CLI setup, you are ready to run a build. Keep in min 1. In a terminal, navigate to your node driver directory: - `cd node-mongodb-native` + ```sh + cd node-mongodb-native + ``` -1. Use the Evergreen `patch` command. `-y` skips the confirmation dialog. `-u` includes uncommitted changes. `-p [project name]` specifies the Evergreen project. --browse opens the patch URL in your browser. +1. Use the Evergreen `patch` command. `-y` skips the confirmation dialog. `-u` includes uncommitted changes. `-p [project name]` specifies the Evergreen project. `--browse` opens the patch URL in your browser. - `evergreen patch -y -u -p mongo-node-driver-next --browse` + ```sh + evergreen patch -y -u -p mongo-node-driver-next --browse + ``` 1. In your browser, select the build variants and tasks to run. @@ -145,7 +197,7 @@ modify the steps to work with existing Node projects. 1. Navigate to a new directory and create a new Node project by running `npm init` in a terminal and working through the interactive prompts. A new file named `package.json` will be created for you. 1. In `package.json`, create a new dependency for `mongodb` that points to your local copy of the driver. For example: - ``` + ```JSON "dependencies": { "mongodb": "/path-to-your-copy-of-the-driver-repo/node-mongodb-native" } @@ -161,19 +213,19 @@ modify the steps to work with existing Node projects. ### Framework -We use mocha to construct our test suites and chai to assert expectations. +We use `mocha` to construct our test suites and `chai` to assert expectations. Some special notes on how mocha works with our testing setup: - `before` hooks will run even if a test is skipped by the environment it runs on. 
- - So, for example, if your before hook does logic that can only run on a certain server version you can't depend on your test block metadata to filter for that. + - So, for example, if your `before` hook does logic that can only run on a certain server version you can't depend on your test block metadata to filter for that. - `after` hooks cannot be used to clean up clients because the session leak checker currently runs in an `afterEach` hook, which would be executed before any `after` hook has a chance to run ### Skipping Tests Not all tests are able to run in all environments and some are unable to run at all due to known bugs. -When marking a test to be skiped, be sure to include a `skipReason`, so that it can be added to the test run printout. +When marking a test to be skipped, be sure to include a `skipReason`, so that it can be added to the test run printout. ```javascript // skipping an individual test @@ -196,15 +248,23 @@ We recommend using a different terminal for each specialized environment to avoi Before you begin any of the subsections below, clone the [drivers-evergreen-tools repo](https://github.com/mongodb-labs/drivers-evergreen-tools.git). -We recommend creating an environment variable named `DRIVERS_TOOLS` that stores the path to your local copy of the driver-evergreen-tools repo: `export DRIVERS_TOOLS="/path/to/your/copy/of/drivers-evergreen-tools"`. +We recommend creating an environment variable named `DRIVERS_TOOLS` that stores the path to your local copy of the `driver-evergreen-tools` repo: + +```sh +export DRIVERS_TOOLS="/path/to/your/copy/of/drivers-evergreen-tools" +``` ### Serverless The following steps will walk you through how to create and test a MongoDB Serverless instance. -1. Create the following environment variables using a command like `export PROJECT="node-driver"`. +1. 
Create the following environment variables using a command like: + + ```sh + export PROJECT="node-driver" + ``` - > Note: MongoDB employees can pull these values from the Evergreen project's configuration. + > **Note:** MongoDB employees can pull these values from the Evergreen project's configuration. | Variable Name | Description | | ---------------------------- | ---------------------------------------------------------------------------------------------------------------- | @@ -252,7 +312,11 @@ The following steps will walk you through how to create and test a MongoDB Serve 1. Source the environment variables using a command like `source serverless.env`. -1. Export **each** of the environment variables that were created in `serverless.env`. For example: `export SINGLE_MONGOS_LB_URI`. +1. Export **each** of the environment variables that were created in `serverless.env`. For example: + + ```sh + export SINGLE_MONGOS_LB_URI + ``` 1. Comment out the line in `.evergreen/run-serverless-tests.sh` that sources `install-dependencies.sh`. @@ -264,32 +328,38 @@ The following steps will walk you through how to create and test a MongoDB Serve The following steps will walk you through how to start and test a load balancer. -1. Start a sharded cluster with two mongos, so you have a URI similar to `MONGODB_URI=mongodb://host1,host2/`. The server must be version 5.2.0 or higher. +1. Start a sharded cluster with two `mongos`, so you have a URI similar to `MONGODB_URI=mongodb://host1,host2/`. The server must be version 5.2.0 or higher. 
Create the config server: - `mongod --configsvr --replSet test --dbpath config1 --bind_ip localhost --port 27217` + ```sh + mongod --configsvr --replSet test --dbpath config1 --bind_ip localhost --port 27217 + ``` Initiate the config server in the shell: - ```shell + ```sh mongosh "mongodb://localhost:27217" --eval "rs.initiate( { _id: 'test', configsvr: true, members: [ { _id: 0, host: 'localhost:27217' } ] })" ``` Create shard replica sets: - `mongod --shardsvr --replSet testing --dbpath repl1 --bind_ip localhost --port 27218 --setParameter enableTestCommands=true` - `mongod --shardsvr --replSet testing --dbpath repl2 --bind_ip localhost --port 27219 --setParameter enableTestCommands=true` - `mongod --shardsvr --replSet testing --dbpath repl3 --bind_ip localhost --port 27220 --setParameter enableTestCommands=true` + ```sh + mongod --shardsvr --replSet testing --dbpath repl1 --bind_ip localhost --port 27218 --setParameter enableTestCommands=true + mongod --shardsvr --replSet testing --dbpath repl2 --bind_ip localhost --port 27219 --setParameter enableTestCommands=true + mongod --shardsvr --replSet testing --dbpath repl3 --bind_ip localhost --port 27220 --setParameter enableTestCommands=true + ``` Initiate replica set in the shell: - ```shell + ```sh mongosh "mongodb://localhost:27218" --eval "rs.initiate( { _id: 'testing', members: [ { _id: 0, host: 'localhost:27218' }, { _id: 1, host: 'localhost:27219' }, { _id: 2, host: 'localhost:27220' }] })" ``` - Create two mongoses running on ports 27017 and 27018: - `mongos --configdb test/localhost:27217 --bind_ip localhost --setParameter enableTestCommands=1 --setParameter --setParameter loadBalancerPort=27050` - `mongos --configdb test/localhost:27217 --port 27018 --bind_ip localhost --setParameter enableTestCommands=1 --setParameter --setParameter loadBalancerPort=27051`. 
+ Create two `mongos` running on ports `27017` and `27018`: + ```sh + mongos --configdb test/localhost:27217 --bind_ip localhost --setParameter enableTestCommands=1 --setParameter loadBalancerPort=27050 + mongos --configdb test/localhost:27217 --port 27018 --bind_ip localhost --setParameter enableTestCommands=1 --setParameter loadBalancerPort=27051 + ``` - Initiate cluster on mongos in shell: - ```shell - mongosh "mongodb://localhost:27017" --eval "sh.addShard('testing/localhost:27218,localhost:27219,localhost:27220')" + Initiate cluster on `mongos` in shell: + ```sh + mongosh "mongodb://localhost:27017" --eval "sh.addShard('testing/localhost:27218,localhost:27219,localhost:27220')" mongosh "mongodb://localhost:27017" --eval "sh.enableSharding('test')" ``` 1. An alternative way to the fully manual cluster setup is to use `mlaunch`: @@ -304,17 +374,23 @@ The following steps will walk you through how to start and test a load balancer. ``` When `mlaunch` has stopped the cluster, navigate to the `data` directory and edit the `.mlaunch_startup` file: - - Add `--setParameter \"loadBalancerPort=27050\"` to the first mongos configuration at the bottom of the file. - - Add `--setParameter \"loadBalancerPort=27051\"` to the second mongos configuration at the bottom of the file. + - Add `--setParameter loadBalancerPort=27050` to the first `mongos` configuration at the bottom of the file. + - Add `--setParameter loadBalancerPort=27051` to the second `mongos` configuration at the bottom of the file. Navigate back up to the root directory where `mlaunch` was initialized and restart: - ```shell + ```sh mlaunch start ``` -1. Create an environment variable named `MONGODB_URI` that stores the URI of the sharded cluster you just created. For example: `export MONGODB_URI="mongodb://host1,host2/"` -1. Install the HAProxy load balancer. For those on macOS, you can install HAProxy with `brew install haproxy`. -1. 
Start the load balancer by using the [run-load-balancer script](https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-load-balancer.sh) provided in drivers-evergreen-tools. +1. Create an environment variable named `MONGODB_URI` that stores the URI of the sharded cluster you just created. For example: + ```sh + export MONGODB_URI="mongodb://host1,host2/" + ``` +1. Install the HAProxy load balancer. For those on macOS, you can install HAProxy with: + ```sh + brew install haproxy + ``` +1. Start the load balancer by using the [run-load-balancer script](https://github.com/mongodb-labs/drivers-evergreen-tools/blob/master/.evergreen/run-load-balancer.sh) provided in `drivers-evergreen-tools`. ```sh $DRIVERS_TOOLS/.evergreen/run-load-balancer.sh start ``` @@ -329,9 +405,18 @@ The following steps will walk you through how to start and test a load balancer. ``` A new file name `lb.env` is automatically created. 1. Source the environment variables using a command like `source lb.env`. -1. Export **each** of the environment variables that were created in `lb.env`. For example: `export SINGLE_MONGOS_LB_URI`. -1. Export the `LOAD_BALANCER` environment variable to 'true': `export LOAD_BALANCER='true'` -1. Disable auth for tests: `export AUTH='noauth'` +1. Export **each** of the environment variables that were created in `lb.env`. For example: + ```sh + export SINGLE_MONGOS_LB_URI + ``` +1. Export the `LOAD_BALANCER` environment variable to `true`: + ```sh + export LOAD_BALANCER='true' + ``` +1. Disable auth for tests: + ```sh + export AUTH='noauth' + ``` 1. Run the test suite as you normally would: ```sh npm run check:test @@ -342,29 +427,32 @@ The following steps will walk you through how to start and test a load balancer. 
$DRIVERS_TOOLS/.evergreen/run-load-balancer.sh stop ``` -### Client-Side Field Level Encryption (CSFLE) +### Client-Side Field-Level Encryption (CSFLE) The following steps will walk you through how to run the tests for CSFLE. 1. Install [MongoDB Client Encryption][npm-csfle] if you haven't already: - `npm install mongodb-client-encryption`. Note: if developing changes in `mongodb-client-encryption`, + ```sh + npm install mongodb-client-encryption + ``` + > **Note:** if developing changes in `mongodb-client-encryption`, you can link it locally using `etc/tooling/fle.sh`. -1. Create the following environment variables using a command like `export AWS_REGION="us-east-1"`. - - > Note: MongoDB employees can pull these values from the Evergreen project's configuration. - - | Variable Name | Description | - | ----------------------- | ------------------------------------------------------------------------------------------- | - | `AWS_ACCESS_KEY_ID` | The AWS access key ID used to generate KMS messages | - | `AWS_SECRET_ACCESS_KEY` | The AWS secret access key used to generate KMS messages | - | `AWS_REGION` | The AWS region where the KMS resides (e.g., `us-east-1`) | - | `AWS_CMK_ID` | The Customer Master Key for the KMS | - | `CSFLE_KMS_PROVIDERS` | The raw EJSON description of the KMS providers. An example of the format is provided below. | - | KMIP_TLS_CA_FILE | /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/x509gen/ca.pem - | - | KMIP_TLS_CERT_FILE | /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/x509gen/client.pem - +1. Create the following environment variables using a command like: + ```sh + export AWS_REGION="us-east-1" + ``` + > **Note:** MongoDB employees can pull these values from the Evergreen project's configuration. 
+ +   | Variable Name           | Description                                                                                 | +   | ----------------------- | ------------------------------------------------------------------------------------------- | +   | `AWS_ACCESS_KEY_ID`     | The AWS access key ID used to generate KMS messages                                         | +   | `AWS_SECRET_ACCESS_KEY` | The AWS secret access key used to generate KMS messages                                     | +   | `AWS_REGION`            | The AWS region where the KMS resides (e.g., `us-east-1`)                                    | +   | `AWS_CMK_ID`            | The Customer Master Key for the KMS                                                         | +   | `CSFLE_KMS_PROVIDERS`   | The raw EJSON description of the KMS providers. An example of the format is provided below. | +   | `KMIP_TLS_CA_FILE`      | /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/x509gen/ca.pem                     | +   | `KMIP_TLS_CERT_FILE`    | /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/x509gen/client.pem                 | The value of the `CSFLE_KMS_PROVIDERS` variable will have the following format: @@ -392,83 +480,89 @@ The following steps will walk you through how to run the tests for CSFLE. ``` 1. Start the KMIP servers: - `DRIVERS_TOOLS="/path/to/mongodb-labs/drivers-evergreen-tools" .evergreen/run-kms-servers.sh` + ```sh + DRIVERS_TOOLS="/path/to/mongodb-labs/drivers-evergreen-tools" .evergreen/run-kms-servers.sh + ``` -1. Ensure default ~/.aws/config is present: +1. Ensure default `~/.aws/config` is present: - ``` - [default] - aws_access_key_id=AWS_ACCESS_KEY_ID - aws_secret_access_key=AWS_SECRET_ACCESS_KEY - ``` + ``` + [default] + aws_access_key_id=AWS_ACCESS_KEY_ID + aws_secret_access_key=AWS_SECRET_ACCESS_KEY + ``` 1.
Set temporary AWS credentials - ``` - pip3 install boto3 - PYTHON="python3" source /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/csfle/set-temp-creds.sh - ``` + ``` + pip3 install boto3 + PYTHON="python3" source /path/to/mongodb-labs/drivers-evergreen-tools/.evergreen/csfle/set-temp-creds.sh + ``` - Alternatively for fish users the following script can be substituted for set-temp-creds.sh: + Alternatively, for fish users, the following script can be substituted for `set-temp-creds.sh`: - ```fish - function set_aws_creds - set PYTHON_SCRIPT "\ - import boto3 - client = boto3.client('sts') - credentials = client.get_session_token()['Credentials'] - print (credentials['AccessKeyId'] + ' ' + credentials['SecretAccessKey'] + ' ' + credentials['SessionToken'])" + ```fish + function set_aws_creds + set PYTHON_SCRIPT "\ + import boto3 + client = boto3.client('sts') + credentials = client.get_session_token()['Credentials'] + print (credentials['AccessKeyId'] + ' ' + credentials['SecretAccessKey'] + ' ' + credentials['SessionToken'])" - echo $PYTHON_SCRIPT | python3 - - end + echo $PYTHON_SCRIPT | python3 - + end - set CREDS (set_aws_creds) + set CREDS (set_aws_creds) - set CSFLE_AWS_TEMP_ACCESS_KEY_ID (echo $CREDS | awk '{print $1}') - set CSFLE_AWS_TEMP_SECRET_ACCESS_KEY (echo $CREDS | awk '{print $2}') - set CSFLE_AWS_TEMP_SESSION_TOKEN (echo $CREDS | awk '{print $3}') + set CSFLE_AWS_TEMP_ACCESS_KEY_ID (echo $CREDS | awk '{print $1}') + set CSFLE_AWS_TEMP_SECRET_ACCESS_KEY (echo $CREDS | awk '{print $2}') + set CSFLE_AWS_TEMP_SESSION_TOKEN (echo $CREDS | awk '{print $3}') - set -e CREDS - ``` + set -e CREDS + ``` 1. Run the functional tests: + ```sh + npm run check:test + ``` - `npm run check:test` - - The output of the tests will include sections like "Client Side Encryption Corpus," "Client Side Encryption Functional," "Client Side Encryption Prose Tests," and "Client Side Encryption." 
+ The output of the tests will include sections like "Client-Side Encryption Corpus", "Client-Side Encryption Functional", "Client-Side Encryption Prose Tests", and "Client-Side Encryption". - To run the functional tests using the crypt shared library instead of mongocryptd, download the appropriate version of the crypt shared library for the enterprise server version [here](https://www.mongodb.com/download-center/enterprise/releases) and then set the location of it in the environment variable `CRYPT_SHARED_LIB_PATH`. + To run the functional tests using the crypt shared library instead of `mongocryptd`, download the appropriate version of the crypt shared library for the enterprise server version [here](https://www.mongodb.com/download-center/enterprise/releases) and then set the location of it in the environment variable `CRYPT_SHARED_LIB_PATH`. #### Testing driver changes with mongosh -These steps require mongosh to be available locally. Clone it from Github. +These steps require `mongosh` to be available locally. Clone it from GitHub. -Mongosh uses a lerna monorepo. As a result, mongosh contains multiple references to the `mongodb` package +`mongosh` uses a `lerna` monorepo. As a result, `mongosh` contains multiple references to the `mongodb` package in their `package.json`s. -Set up mongosh by following the steps in the mongosh readme. +Set up `mongosh` by following the steps in the `mongosh` readme. ##### Point mongosh to the driver -mongosh contains a script that does this. To use the script, create an environment +mongosh contains a script that does this. To use the script, create an environment variable `REPLACE_PACKAGE` that contains a string in the form -`mongodb:`. The package replacement script will replace +`mongodb:`. The package replacement script will replace all occurrences of `mongodb` with the local path of your driver. An alternative, which can be useful for -testing a release, is to first run `npm pack` on the driver. 
This generates a tarball containing all the code -that would be uploaded to npm if it were released. Then set the environment variable `REPLACE_PACKAGE` -with the pull path to the file. +testing a release, is to first run `npm pack` on the driver. This generates a tarball containing all the code +that would be uploaded to `npm` if it were released. Then, set the environment variable `REPLACE_PACKAGE` +with the full path to the file. -Once the environment variable is set, run replace package in mongosh with `npm run replace:package`. +Once the environment variable is set, run replace package in `mongosh` with: +```sh +npm run replace:package +``` ##### Run specific package tests -mongosh's readme documents how to run its tests. Most likely, it isn't necessary to run all of mongosh's -tests. The mongosh readme also documents how to run tests for a particular scope. The scopes are +`mongosh`'s readme documents how to run its tests. Most likely, it isn't necessary to run all of `mongosh`'s +tests. The `mongosh` readme also documents how to run tests for a particular scope. The scopes are listed in the `generate_mongosh_tasks.js` evergreen generation script. -For example, to run the `service-provider-server` package, run the following command in mongosh: +For example, to run the `service-provider-server` package, run the following command in `mongosh`: ```shell lerna run test --scope @mongosh/service-provider-server @@ -476,20 +570,43 @@ lerna run test --scope @mongosh/service-provider-server #### KMIP FLE support tests -1. Install virtualenv: `pip install virtualenv` -2. Source the ./activate-kmstlsvenv.sh script in driver evergreen tools `.evergreen/csfle/activate-kmstlsvenv.sh` - 1. This will install all the dependencies needed to run a python kms_kmip simulated server -3.
In 4 separate terminals launch the following: -   - `./kmstlsvenv/bin/python3 -u kms_kmip_server.py` # by default it always runs on port 5698 -   - `./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/expired.pem --port 8000` -   - `./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/wrong-host.pem --port 8001` -   - `./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/server.pem --port 8002 --require_client_cert` +1. Install `virtualenv`: +   ```sh +   pip install virtualenv +   ``` +2. Source the `./activate-kmstlsvenv.sh` script in driver evergreen tools `.evergreen/csfle/activate-kmstlsvenv.sh` +   - This will install all the dependencies needed to run a Python kms_kmip simulated server +3. In four separate terminals, launch the following: +   ```sh +   ./kmstlsvenv/bin/python3 -u kms_kmip_server.py # by default it always runs on port 5698 +   ``` +   ```sh +   ./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/expired.pem --port 8000 +   ``` +   ```sh +   ./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/wrong-host.pem --port 8001 +   ``` +   ```sh +   ./kmstlsvenv/bin/python3 -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/server.pem --port 8002 --require_client_cert +   ``` 4. Set the following environment variables: -   - `export KMIP_TLS_CA_FILE="${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem"` -   - `export KMIP_TLS_CERT_FILE="${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem"` -5. Install the FLE lib: `npm i --no-save mongodb-client-encryption` -6. Launch a mongodb server -7.
Run the full suite `npm run check:test` or more specifically `npx mocha --config test/mocha_mongodb.json test/integration/client-side-encryption/` + ```sh + export KMIP_TLS_CA_FILE="${DRIVERS_TOOLS}/.evergreen/x509gen/ca.pem" + export KMIP_TLS_CERT_FILE="${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem" + ``` +5. Install the FLE lib: + ```sh + npm i --no-save mongodb-client-encryption + ``` +6. Launch a MongoDB server +7. Run the full suite: + ```sh + npm run check:test + ``` + or more specifically + ```sh + npx mocha --config test/mocha_mongodb.json test/integration/client-side-encryption/ + ``` ### TODO Special Env Sections diff --git a/test/spec/auth/legacy/connection-string.json b/test/spec/auth/legacy/connection-string.json index fcb2dbf57d3..5b54e2aadd2 100644 --- a/test/spec/auth/legacy/connection-string.json +++ b/test/spec/auth/legacy/connection-string.json @@ -481,10 +481,9 @@ } } }, - { - "description": "should recognise the mechanism and request callback (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC", - "callback": ["oidcRequest"], + { + "description": "should recognise the mechanism with test environment (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test", "valid": true, "credential": { "username": null, @@ -492,14 +491,13 @@ "source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "REQUEST_TOKEN_CALLBACK": true + "ENVIRONMENT": "test" } } }, { - "description": "should recognise the mechanism when auth source is explicitly specified and with request callback (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external", - "callback": ["oidcRequest"], + "description": "should recognise the mechanism when auth source is explicitly specified and with environment (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external&authMechanismProperties=ENVIRONMENT:test", "valid": 
true, "credential": { "username": null, @@ -507,14 +505,43 @@ "source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "REQUEST_TOKEN_CALLBACK": true + "ENVIRONMENT": "test" } } }, { - "description": "should recognise the mechanism with request and refresh callback (MONGODB-OIDC)", + "description": "should throw an exception if supplied a password (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test", + "valid": false, + "credential": null + }, + { + "description": "should throw an exception if username is specified for test (MONGODB-OIDC)", + "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&ENVIRONMENT:test", + "valid": false, + "credential": null + }, + { + "description": "should throw an exception if specified environment is not supported (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:invalid", + "valid": false, + "credential": null + }, + { + "description": "should throw an exception if neither environment nor callbacks specified (MONGODB-OIDC)", "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC", - "callback": ["oidcRequest", "oidcRefresh"], + "valid": false, + "credential": null + }, + { + "description": "should throw an exception when unsupported auth property is specified (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=UnsupportedProperty:unexisted", + "valid": false, + "credential": null + }, + { + "description": "should recognise the mechanism with azure provider (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo", "valid": true, "credential": { "username": null, @@ -522,91 +549,124 @@ "source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "REQUEST_TOKEN_CALLBACK": true, - "REFRESH_TOKEN_CALLBACK": true + 
"ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "foo" } } }, { - "description": "should recognise the mechanism and username with request callback (MONGODB-OIDC)", - "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC", - "callback": ["oidcRequest"], + "description": "should accept a username with azure provider (MONGODB-OIDC)", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo", "valid": true, "credential": { - "username": "principalName", + "username": "user", "password": null, "source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "REQUEST_TOKEN_CALLBACK": true + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "foo" } } }, { - "description": "should recognise the mechanism with aws device (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws", + "description": "should accept a url-encoded TOKEN_RESOURCE (MONGODB-OIDC)", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:mongodb%3A%2F%2Ftest-cluster", "valid": true, "credential": { - "username": null, + "username": "user", "password": null, "source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "PROVIDER_NAME": "aws" + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "mongodb://test-cluster" } } }, { - "description": "should recognise the mechanism when auth source is explicitly specified and with aws device (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external&authMechanismProperties=PROVIDER_NAME:aws", + "description": "should accept an un-encoded TOKEN_RESOURCE (MONGODB-OIDC)", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:mongodb://test-cluster", "valid": true, "credential": { - "username": null, + "username": "user", "password": null, 
"source": "$external", "mechanism": "MONGODB-OIDC", "mechanism_properties": { - "PROVIDER_NAME": "aws" + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "mongodb://test-cluster" } } }, { - "description": "should throw an exception if username and password are specified (MONGODB-OIDC)", - "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC", - "callback": ["oidcRequest"], - "valid": false, - "credential": null + "description": "should handle a complicated url-encoded TOKEN_RESOURCE (MONGODB-OIDC)", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:abcd%25ef%3Ag%26hi", + "valid": true, + "credential": { + "username": "user", + "password": null, + "source": "$external", + "mechanism": "MONGODB-OIDC", + "mechanism_properties": { + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "abcd%ef:g&hi" + } + } }, { - "description": "should throw an exception if username and deviceName are specified (MONGODB-OIDC)", - "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&PROVIDER_NAME:gcp", - "valid": false, - "credential": null + "description": "should url-encode a TOKEN_RESOURCE (MONGODB-OIDC)", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:a$b", + "valid": true, + "credential": { + "username": "user", + "password": null, + "source": "$external", + "mechanism": "MONGODB-OIDC", + "mechanism_properties": { + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": "a$b" + } + } }, { - "description": "should throw an exception if specified deviceName is not supported (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:unexisted", + "description": "should accept a username and throw an error for a password with azure provider (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo", 
"valid": false, "credential": null }, { - "description": "should throw an exception if neither deviceName nor callbacks specified (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC", + "description": "should throw an exception if no token audience is given for azure provider (MONGODB-OIDC)", + "uri": "mongodb://username@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure", "valid": false, "credential": null }, { - "description": "should throw an exception when only refresh callback is specified (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC", - "callback": ["oidcRefresh"], + "description": "should recognise the mechanism with gcp provider (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:foo", + "valid": true, + "credential": { + "username": null, + "password": null, + "source": "$external", + "mechanism": "MONGODB-OIDC", + "mechanism_properties": { + "ENVIRONMENT": "gcp", + "TOKEN_RESOURCE": "foo" + } + } + }, + { + "description": "should throw an error for a username and password with gcp provider (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:foo", "valid": false, "credential": null }, { - "description": "should throw an exception when unsupported auth property is specified (MONGODB-OIDC)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=UnsupportedProperty:unexisted", + "description": "should throw an error if not TOKEN_RESOURCE with gcp provider (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp", "valid": false, "credential": null } ] -} +} \ No newline at end of file diff --git a/test/spec/auth/legacy/connection-string.yml b/test/spec/auth/legacy/connection-string.yml index 9f8aab4a725..a9651133959 
100644 --- a/test/spec/auth/legacy/connection-string.yml +++ b/test/spec/auth/legacy/connection-string.yml @@ -350,10 +350,8 @@ tests: mechanism: MONGODB-AWS mechanism_properties: AWS_SESSION_TOKEN: token!@#$%^&*()_+ -- description: should recognise the mechanism and request callback (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC - callback: - - oidcRequest +- description: should recognise the mechanism with test environment (MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test valid: true credential: username: @@ -361,12 +359,9 @@ tests: source: "$external" mechanism: MONGODB-OIDC mechanism_properties: - REQUEST_TOKEN_CALLBACK: true -- description: should recognise the mechanism when auth source is explicitly specified - and with request callback (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external - callback: - - oidcRequest + ENVIRONMENT: test +- description: should recognise the mechanism when auth source is explicitly specified and with environment (MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external&authMechanismProperties=ENVIRONMENT:test valid: true credential: username: @@ -374,83 +369,118 @@ tests: source: "$external" mechanism: MONGODB-OIDC mechanism_properties: - REQUEST_TOKEN_CALLBACK: true -- description: should recognise the mechanism with request and refresh callback (MONGODB-OIDC) + ENVIRONMENT: test +- description: should throw an exception if supplied a password (MONGODB-OIDC) + uri: mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test + valid: false + credential: +- description: should throw an exception if username is specified for test (MONGODB-OIDC) + uri: mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&ENVIRONMENT:test + valid: false + credential: +- description: should throw an exception if specified environment is not supported 
(MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:invalid + valid: false + credential: +- description: should throw an exception if neither environment nor callbacks specified (MONGODB-OIDC) uri: mongodb://localhost/?authMechanism=MONGODB-OIDC - callback: - - oidcRequest - - oidcRefresh + valid: false + credential: +- description: should throw an exception when unsupported auth property is specified (MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=UnsupportedProperty:unexisted + valid: false + credential: +- description: should recognise the mechanism with azure provider (MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo valid: true credential: - username: - password: - source: "$external" + username: null + password: null + source: $external mechanism: MONGODB-OIDC mechanism_properties: - REQUEST_TOKEN_CALLBACK: true - REFRESH_TOKEN_CALLBACK: true -- description: should recognise the mechanism and username with request callback (MONGODB-OIDC) - uri: mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC - callback: - - oidcRequest + ENVIRONMENT: azure + TOKEN_RESOURCE: foo +- description: should accept a username with azure provider (MONGODB-OIDC) + uri: mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo valid: true credential: - username: principalName - password: - source: "$external" + username: user + password: null + source: $external mechanism: MONGODB-OIDC mechanism_properties: - REQUEST_TOKEN_CALLBACK: true -- description: should recognise the mechanism with aws device (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws + ENVIRONMENT: azure + TOKEN_RESOURCE: foo +- description: should accept a url-encoded TOKEN_RESOURCE (MONGODB-OIDC) + uri: 
mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:mongodb%3A%2F%2Ftest-cluster valid: true credential: - username: - password: - source: "$external" + username: user + password: null + source: $external mechanism: MONGODB-OIDC mechanism_properties: - PROVIDER_NAME: aws -- description: should recognise the mechanism when auth source is explicitly specified - and with aws device (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authSource=$external&authMechanismProperties=PROVIDER_NAME:aws + ENVIRONMENT: azure + TOKEN_RESOURCE: 'mongodb://test-cluster' +- description: should accept an un-encoded TOKEN_RESOURCE (MONGODB-OIDC) + uri: mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:mongodb://test-cluster valid: true credential: - username: - password: - source: "$external" + username: user + password: null + source: $external mechanism: MONGODB-OIDC mechanism_properties: - PROVIDER_NAME: aws -- description: should throw an exception if username and password are specified (MONGODB-OIDC) - uri: mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC - callback: - - oidcRequest - valid: false - credential: -- description: should throw an exception if username and deviceName are specified - (MONGODB-OIDC) - uri: mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&PROVIDER_NAME:gcp - valid: false + ENVIRONMENT: azure + TOKEN_RESOURCE: 'mongodb://test-cluster' +- description: should handle a complicated url-encoded TOKEN_RESOURCE (MONGODB-OIDC) + uri: mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:abcd%25ef%3Ag%26hi + valid: true credential: -- description: should throw an exception if specified deviceName is not supported - (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:unexisted - valid: false + 
username: user + password: null + source: $external + mechanism: MONGODB-OIDC + mechanism_properties: + ENVIRONMENT: azure + TOKEN_RESOURCE: 'abcd%ef:g&hi' +- description: should url-encode a TOKEN_RESOURCE (MONGODB-OIDC) + uri: mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:a$b + valid: true credential: -- description: should throw an exception if neither deviceName nor callbacks specified - (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC + username: user + password: null + source: $external + mechanism: MONGODB-OIDC + mechanism_properties: + ENVIRONMENT: azure + TOKEN_RESOURCE: a$b +- description: should accept a username and throw an error for a password with azure provider (MONGODB-OIDC) + uri: mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo valid: false - credential: -- description: should throw an exception when only refresh callback is specified (MONGODB-OIDC) - uri: mongodb://localhost/?authMechanism=MONGODB-OIDC - callback: - - oidcRefresh + credential: null +- description: should throw an exception if no token audience is given for azure provider (MONGODB-OIDC) + uri: mongodb://username@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure valid: false + credential: null +- description: should recognise the mechanism with gcp provider (MONGODB-OIDC) + uri: mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:foo + valid: true credential: -- description: should throw an exception when unsupported auth property is specified + username: null + password: null + source: $external + mechanism: MONGODB-OIDC + mechanism_properties: + ENVIRONMENT: gcp + TOKEN_RESOURCE: foo +- description: should throw an error for a username and password with gcp provider (MONGODB-OIDC) - uri: 
mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=UnsupportedProperty:unexisted + uri: mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:foo valid: false - credential: + credential: null +- description: should throw an error if not TOKEN_RESOURCE with gcp provider (MONGODB-OIDC) + uri: mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp + valid: false + credential: null diff --git a/test/spec/auth/unified/mongodb-oidc-no-retry.json b/test/spec/auth/unified/mongodb-oidc-no-retry.json new file mode 100644 index 00000000000..9dbe1982704 --- /dev/null +++ b/test/spec/auth/unified/mongodb-oidc-no-retry.json @@ -0,0 +1,421 @@ +{ + "description": "MONGODB-OIDC authentication with retry disabled", + "schemaVersion": "1.19", + "runOnRequirements": [ + { + "minServerVersion": "7.0", + "auth": true, + "authMechanism": "MONGODB-OIDC" + } + ], + "createEntities": [ + { + "client": { + "id": "failPointClient", + "useMultipleMongoses": false + } + }, + { + "client": { + "id": "client0", + "uriOptions": { + "authMechanism": "MONGODB-OIDC", + "authMechanismProperties": { + "$$placeholder": 1 + }, + "retryReads": false, + "retryWrites": false + }, + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent", + "commandFailedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "collName" + } + } + ], + "initialData": [ + { + "collectionName": "collName", + "databaseName": "test", + "documents": [] + } + ], + "tests": [ + { + "description": "A read operation should succeed", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": {} + }, + "expectResult": [] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + 
"commandStartedEvent": { + "command": { + "find": "collName", + "filter": {} + } + } + }, + { + "commandSucceededEvent": { + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "A write operation should succeed", + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": 1 + } + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "collName", + "documents": [ + { + "_id": 1, + "x": 1 + } + ] + } + } + }, + { + "commandSucceededEvent": { + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Read commands should reauthenticate and retry when a ReauthenticationRequired error happens", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "find" + ], + "errorCode": 391 + } + } + } + }, + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": {} + }, + "expectResult": [] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "collName", + "filter": {} + } + } + }, + { + "commandFailedEvent": { + "commandName": "find" + } + }, + { + "commandStartedEvent": { + "command": { + "find": "collName", + "filter": {} + } + } + }, + { + "commandSucceededEvent": { + "commandName": "find" + } + } + ] + } + ] + }, + { + "description": "Write commands should reauthenticate and retry when a ReauthenticationRequired error happens", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "insert" + ], + "errorCode": 391 + } + } + } + }, + { + "name": "insertOne", 
+ "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": 1 + } + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "collName", + "documents": [ + { + "_id": 1, + "x": 1 + } + ] + } + } + }, + { + "commandFailedEvent": { + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "collName", + "documents": [ + { + "_id": 1, + "x": 1 + } + ] + } + } + }, + { + "commandSucceededEvent": { + "commandName": "insert" + } + } + ] + } + ] + }, + { + "description": "Handshake with cached token should use speculative authentication", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "insert" + ], + "closeConnection": true + } + } + } + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": 1 + } + }, + "expectError": { + "isClientError": true + } + }, + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "saslStart" + ], + "errorCode": 18 + } + } + } + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": 1 + } + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "insert": "collName", + "documents": [ + { + "_id": 1, + "x": 1 + } + ] + } + } + }, + { + "commandFailedEvent": { + "commandName": "insert" + } + }, + { + "commandStartedEvent": { + "command": { + "insert": "collName", + "documents": [ + { + "_id": 1, + "x": 1 + } + ] + } + } + }, + { + "commandSucceededEvent": { + "commandName": "insert" + } + } + ] + } + ] + 
}, + { + "description": "Handshake without cached token should not use speculative authentication", + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "failPointClient", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "saslStart" + ], + "errorCode": 18 + } + } + } + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1, + "x": 1 + } + }, + "expectError": { + "errorCode": 18 + } + } + ] + } + ] +} diff --git a/test/spec/auth/unified/mongodb-oidc-no-retry.yml b/test/spec/auth/unified/mongodb-oidc-no-retry.yml new file mode 100644 index 00000000000..426fd72466c --- /dev/null +++ b/test/spec/auth/unified/mongodb-oidc-no-retry.yml @@ -0,0 +1,228 @@ +--- +description: "MONGODB-OIDC authentication with retry disabled" +schemaVersion: "1.19" +runOnRequirements: +- minServerVersion: "7.0" + auth: true + authMechanism: "MONGODB-OIDC" +createEntities: +- client: + id: &failPointClient failPointClient + useMultipleMongoses: false +- client: + id: client0 + uriOptions: + authMechanism: "MONGODB-OIDC" + # The $$placeholder document should be replaced by auth mechanism + # properties that enable OIDC auth on the target cloud platform. For + # example, when running the test on EC2, replace the $$placeholder + # document with {"ENVIRONMENT": "test"}. 
+ authMechanismProperties: { $$placeholder: 1 } + retryReads: false + retryWrites: false + observeEvents: + - commandStartedEvent + - commandSucceededEvent + - commandFailedEvent +- database: + id: database0 + client: client0 + databaseName: test +- collection: + id: collection0 + database: database0 + collectionName: collName +initialData: +- collectionName: collName + databaseName: test + documents: [] +tests: +- description: A read operation should succeed + operations: + - name: find + object: collection0 + arguments: + filter: {} + expectResult: [] + expectEvents: + - client: client0 + events: + - commandStartedEvent: + command: + find: collName + filter: {} + - commandSucceededEvent: + commandName: find +- description: A write operation should succeed + operations: + - name: insertOne + object: collection0 + arguments: + document: + _id: 1 + x: 1 + expectEvents: + - client: client0 + events: + - commandStartedEvent: + command: + insert: collName + documents: + - _id: 1 + x: 1 + - commandSucceededEvent: + commandName: insert +- description: Read commands should reauthenticate and retry when a ReauthenticationRequired error happens + operations: + - name: failPoint + object: testRunner + arguments: + client: failPointClient + failPoint: + configureFailPoint: failCommand + mode: + times: 1 + data: + failCommands: + - find + errorCode: 391 # ReauthenticationRequired + - name: find + object: collection0 + arguments: + filter: {} + expectResult: [] + expectEvents: + - client: client0 + events: + - commandStartedEvent: + command: + find: collName + filter: {} + - commandFailedEvent: + commandName: find + - commandStartedEvent: + command: + find: collName + filter: {} + - commandSucceededEvent: + commandName: find +- description: Write commands should reauthenticate and retry when a ReauthenticationRequired error happens + operations: + - name: failPoint + object: testRunner + arguments: + client: failPointClient + failPoint: + configureFailPoint: failCommand + mode: 
+ times: 1 + data: + failCommands: + - insert + errorCode: 391 # ReauthenticationRequired + - name: insertOne + object: collection0 + arguments: + document: + _id: 1 + x: 1 + expectEvents: + - client: client0 + events: + - commandStartedEvent: + command: + insert: collName + documents: + - _id: 1 + x: 1 + - commandFailedEvent: + commandName: insert + - commandStartedEvent: + command: + insert: collName + documents: + - _id: 1 + x: 1 + - commandSucceededEvent: + commandName: insert +- description: Handshake with cached token should use speculative authentication + operations: + - name: failPoint + object: testRunner + arguments: + client: failPointClient + failPoint: + configureFailPoint: failCommand + mode: + times: 1 + data: + failCommands: + - insert + closeConnection: true + - name: insertOne + object: collection0 + arguments: + document: + _id: 1 + x: 1 + expectError: + isClientError: true + - name: failPoint + object: testRunner + arguments: + client: failPointClient + failPoint: + configureFailPoint: failCommand + mode: + times: 1 + data: + failCommands: + - saslStart + errorCode: 18 + - name: insertOne + object: collection0 + arguments: + document: + _id: 1 + x: 1 + expectEvents: + - client: client0 + events: + - commandStartedEvent: + command: + insert: collName + documents: + - _id: 1 + x: 1 + - commandFailedEvent: + commandName: insert + - commandStartedEvent: + command: + insert: collName + documents: + - _id: 1 + x: 1 + - commandSucceededEvent: + commandName: insert +- description: Handshake without cached token should not use speculative authentication + operations: + - name: failPoint + object: testRunner + arguments: + client: failPointClient + failPoint: + configureFailPoint: failCommand + mode: + times: 1 + data: + failCommands: + - saslStart + errorCode: 18 + - name: insertOne + object: collection0 + arguments: + document: + _id: 1 + x: 1 + expectError: + errorCode: 18 \ No newline at end of file diff --git 
a/test/spec/auth/unified/reauthenticate_with_retry.json b/test/spec/auth/unified/reauthenticate_with_retry.json deleted file mode 100644 index ef110562ede..00000000000 --- a/test/spec/auth/unified/reauthenticate_with_retry.json +++ /dev/null @@ -1,191 +0,0 @@ -{ - "description": "reauthenticate_with_retry", - "schemaVersion": "1.12", - "runOnRequirements": [ - { - "minServerVersion": "6.3", - "auth": true - } - ], - "createEntities": [ - { - "client": { - "id": "client0", - "uriOptions": { - "retryReads": true, - "retryWrites": true - }, - "observeEvents": [ - "commandStartedEvent", - "commandSucceededEvent", - "commandFailedEvent" - ] - } - }, - { - "database": { - "id": "database0", - "client": "client0", - "databaseName": "db" - } - }, - { - "collection": { - "id": "collection0", - "database": "database0", - "collectionName": "collName" - } - } - ], - "initialData": [ - { - "collectionName": "collName", - "databaseName": "db", - "documents": [] - } - ], - "tests": [ - { - "description": "Read command should reauthenticate when receive ReauthenticationRequired error code and retryReads=true", - "operations": [ - { - "name": "failPoint", - "object": "testRunner", - "arguments": { - "client": "client0", - "failPoint": { - "configureFailPoint": "failCommand", - "mode": { - "times": 1 - }, - "data": { - "failCommands": [ - "find" - ], - "errorCode": 391 - } - } - } - }, - { - "name": "find", - "arguments": { - "filter": {} - }, - "object": "collection0", - "expectResult": [] - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "find": "collName", - "filter": {} - } - } - }, - { - "commandFailedEvent": { - "commandName": "find" - } - }, - { - "commandStartedEvent": { - "command": { - "find": "collName", - "filter": {} - } - } - }, - { - "commandSucceededEvent": { - "commandName": "find" - } - } - ] - } - ] - }, - { - "description": "Write command should reauthenticate when receive 
ReauthenticationRequired error code and retryWrites=true", - "operations": [ - { - "name": "failPoint", - "object": "testRunner", - "arguments": { - "client": "client0", - "failPoint": { - "configureFailPoint": "failCommand", - "mode": { - "times": 1 - }, - "data": { - "failCommands": [ - "insert" - ], - "errorCode": 391 - } - } - } - }, - { - "name": "insertOne", - "object": "collection0", - "arguments": { - "document": { - "_id": 1, - "x": 1 - } - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "insert": "collName", - "documents": [ - { - "_id": 1, - "x": 1 - } - ] - } - } - }, - { - "commandFailedEvent": { - "commandName": "insert" - } - }, - { - "commandStartedEvent": { - "command": { - "insert": "collName", - "documents": [ - { - "_id": 1, - "x": 1 - } - ] - } - } - }, - { - "commandSucceededEvent": { - "commandName": "insert" - } - } - ] - } - ] - } - ] -} diff --git a/test/spec/auth/unified/reauthenticate_with_retry.yml b/test/spec/auth/unified/reauthenticate_with_retry.yml deleted file mode 100644 index bf7cb56f3c8..00000000000 --- a/test/spec/auth/unified/reauthenticate_with_retry.yml +++ /dev/null @@ -1,104 +0,0 @@ ---- -description: reauthenticate_with_retry -schemaVersion: '1.12' -runOnRequirements: -- minServerVersion: '6.3' - auth: true -createEntities: -- client: - id: client0 - uriOptions: - retryReads: true - retryWrites: true - observeEvents: - - commandStartedEvent - - commandSucceededEvent - - commandFailedEvent -- database: - id: database0 - client: client0 - databaseName: db -- collection: - id: collection0 - database: database0 - collectionName: collName -initialData: -- collectionName: collName - databaseName: db - documents: [] -tests: -- description: Read command should reauthenticate when receive ReauthenticationRequired - error code and retryReads=true - operations: - - name: failPoint - object: testRunner - arguments: - client: client0 - failPoint: - 
configureFailPoint: failCommand - mode: - times: 1 - data: - failCommands: - - find - errorCode: 391 - - name: find - arguments: - filter: {} - object: collection0 - expectResult: [] - expectEvents: - - client: client0 - events: - - commandStartedEvent: - command: - find: collName - filter: {} - - commandFailedEvent: - commandName: find - - commandStartedEvent: - command: - find: collName - filter: {} - - commandSucceededEvent: - commandName: find -- description: Write command should reauthenticate when receive ReauthenticationRequired - error code and retryWrites=true - operations: - - name: failPoint - object: testRunner - arguments: - client: client0 - failPoint: - configureFailPoint: failCommand - mode: - times: 1 - data: - failCommands: - - insert - errorCode: 391 - - name: insertOne - object: collection0 - arguments: - document: - _id: 1 - x: 1 - expectEvents: - - client: client0 - events: - - commandStartedEvent: - command: - insert: collName - documents: - - _id: 1 - x: 1 - - commandFailedEvent: - commandName: insert - - commandStartedEvent: - command: - insert: collName - documents: - - _id: 1 - x: 1 - - commandSucceededEvent: - commandName: insert diff --git a/test/spec/auth/unified/reauthenticate_without_retry.json b/test/spec/auth/unified/reauthenticate_without_retry.json deleted file mode 100644 index 6fded476344..00000000000 --- a/test/spec/auth/unified/reauthenticate_without_retry.json +++ /dev/null @@ -1,191 +0,0 @@ -{ - "description": "reauthenticate_without_retry", - "schemaVersion": "1.12", - "runOnRequirements": [ - { - "minServerVersion": "6.3", - "auth": true - } - ], - "createEntities": [ - { - "client": { - "id": "client0", - "uriOptions": { - "retryReads": false, - "retryWrites": false - }, - "observeEvents": [ - "commandStartedEvent", - "commandSucceededEvent", - "commandFailedEvent" - ] - } - }, - { - "database": { - "id": "database0", - "client": "client0", - "databaseName": "db" - } - }, - { - "collection": { - "id": "collection0", - 
"database": "database0", - "collectionName": "collName" - } - } - ], - "initialData": [ - { - "collectionName": "collName", - "databaseName": "db", - "documents": [] - } - ], - "tests": [ - { - "description": "Read command should reauthenticate when receive ReauthenticationRequired error code and retryReads=false", - "operations": [ - { - "name": "failPoint", - "object": "testRunner", - "arguments": { - "client": "client0", - "failPoint": { - "configureFailPoint": "failCommand", - "mode": { - "times": 1 - }, - "data": { - "failCommands": [ - "find" - ], - "errorCode": 391 - } - } - } - }, - { - "name": "find", - "arguments": { - "filter": {} - }, - "object": "collection0", - "expectResult": [] - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "find": "collName", - "filter": {} - } - } - }, - { - "commandFailedEvent": { - "commandName": "find" - } - }, - { - "commandStartedEvent": { - "command": { - "find": "collName", - "filter": {} - } - } - }, - { - "commandSucceededEvent": { - "commandName": "find" - } - } - ] - } - ] - }, - { - "description": "Write command should reauthenticate when receive ReauthenticationRequired error code and retryWrites=false", - "operations": [ - { - "name": "failPoint", - "object": "testRunner", - "arguments": { - "client": "client0", - "failPoint": { - "configureFailPoint": "failCommand", - "mode": { - "times": 1 - }, - "data": { - "failCommands": [ - "insert" - ], - "errorCode": 391 - } - } - } - }, - { - "name": "insertOne", - "object": "collection0", - "arguments": { - "document": { - "_id": 1, - "x": 1 - } - } - } - ], - "expectEvents": [ - { - "client": "client0", - "events": [ - { - "commandStartedEvent": { - "command": { - "insert": "collName", - "documents": [ - { - "_id": 1, - "x": 1 - } - ] - } - } - }, - { - "commandFailedEvent": { - "commandName": "insert" - } - }, - { - "commandStartedEvent": { - "command": { - "insert": "collName", - "documents": [ - { - 
"_id": 1, - "x": 1 - } - ] - } - } - }, - { - "commandSucceededEvent": { - "commandName": "insert" - } - } - ] - } - ] - } - ] -} diff --git a/test/spec/auth/unified/reauthenticate_without_retry.yml b/test/spec/auth/unified/reauthenticate_without_retry.yml deleted file mode 100644 index 394c4be91e0..00000000000 --- a/test/spec/auth/unified/reauthenticate_without_retry.yml +++ /dev/null @@ -1,104 +0,0 @@ ---- -description: reauthenticate_without_retry -schemaVersion: '1.13' -runOnRequirements: -- minServerVersion: '6.3' - auth: true -createEntities: -- client: - id: client0 - uriOptions: - retryReads: false - retryWrites: false - observeEvents: - - commandStartedEvent - - commandSucceededEvent - - commandFailedEvent -- database: - id: database0 - client: client0 - databaseName: db -- collection: - id: collection0 - database: database0 - collectionName: collName -initialData: -- collectionName: collName - databaseName: db - documents: [] -tests: -- description: Read command should reauthenticate when receive ReauthenticationRequired - error code and retryReads=false - operations: - - name: failPoint - object: testRunner - arguments: - client: client0 - failPoint: - configureFailPoint: failCommand - mode: - times: 1 - data: - failCommands: - - find - errorCode: 391 - - name: find - arguments: - filter: {} - object: collection0 - expectResult: [] - expectEvents: - - client: client0 - events: - - commandStartedEvent: - command: - find: collName - filter: {} - - commandFailedEvent: - commandName: find - - commandStartedEvent: - command: - find: collName - filter: {} - - commandSucceededEvent: - commandName: find -- description: Write command should reauthenticate when receive ReauthenticationRequired - error code and retryWrites=false - operations: - - name: failPoint - object: testRunner - arguments: - client: client0 - failPoint: - configureFailPoint: failCommand - mode: - times: 1 - data: - failCommands: - - insert - errorCode: 391 - - name: insertOne - object: 
collection0 - arguments: - document: - _id: 1 - x: 1 - expectEvents: - - client: client0 - events: - - commandStartedEvent: - command: - insert: collName - documents: - - _id: 1 - x: 1 - - commandFailedEvent: - commandName: insert - - commandStartedEvent: - command: - insert: collName - documents: - - _id: 1 - x: 1 - - commandSucceededEvent: - commandName: insert diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index ab2a4d519e4..a27790b207d 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -78,6 +78,7 @@ export class TestConfiguration { }; serverApi: ServerApi; activeResources: number; + isSrv: boolean; constructor(private uri: string, private context: Record) { const url = new ConnectionString(uri); @@ -92,6 +93,7 @@ export class TestConfiguration { this.topologyType = this.isLoadBalanced ? TopologyType.LoadBalanced : context.topologyType; this.buildInfo = context.buildInfo; this.serverApi = context.serverApi; + this.isSrv = uri.indexOf('mongodb+srv') > -1; this.options = { hosts, hostAddresses, @@ -159,8 +161,9 @@ export class TestConfiguration { return this.options.replicaSet; } - isAzureOIDC(uri: string): boolean { - return uri.indexOf('MONGODB-OIDC') > -1 && uri.indexOf('PROVIDER_NAME:azure') > -1; + isOIDC(uri: string, env: string): boolean { + if (!uri) return false; + return uri.indexOf('MONGODB-OIDC') > -1 && uri.indexOf(`ENVIRONMENT:${env}`) > -1; } newClient(urlOrQueryOptions?: string | Record, serverOptions?: Record) { @@ -347,6 +350,11 @@ export class TestConfiguration { url.searchParams.append('authSource', 'admin'); } + // Secrets setup for OIDC always sets the workload URI as MONGODB_URI_SINGLE. 
+ if (process.env.MONGODB_URI_SINGLE?.includes('MONGODB-OIDC')) { + return process.env.MONGODB_URI_SINGLE; + } + const connectionString = url.toString().replace(FILLER_HOST, actualHostsString); return connectionString; diff --git a/test/tools/runner/hooks/configuration.js b/test/tools/runner/hooks/configuration.js index e947a6f069d..1db57745eef 100644 --- a/test/tools/runner/hooks/configuration.js +++ b/test/tools/runner/hooks/configuration.js @@ -113,13 +113,6 @@ const testConfigBeforeHook = async function () { this.configuration = new AstrolabeTestConfiguration(process.env.DRIVERS_ATLAS_TESTING_URI, {}); return; } - // TODO(NODE-5035): Implement OIDC support. Creating the MongoClient will fail - // with "MongoInvalidArgumentError: AuthMechanism 'MONGODB-OIDC' not supported" - // as is expected until that ticket goes in. Then this condition gets removed. - if (MONGODB_URI && MONGODB_URI.includes('MONGODB-OIDC')) { - this.configuration = new TestConfiguration(MONGODB_URI, {}); - return; - } const client = new MongoClient(loadBalanced ? SINGLE_MONGOS_LB_URI : MONGODB_URI, { ...getEnvironmentalOptions(), @@ -172,7 +165,7 @@ const testConfigBeforeHook = async function () { atlas: process.env.ATLAS_CONNECTIVITY != null, aws: MONGODB_URI.includes('authMechanism=MONGODB-AWS'), awsSdk: process.env.MONGODB_AWS_SDK, - azure: MONGODB_URI.includes('PROVIDER_NAME:azure'), + azure: MONGODB_URI.includes('ENVIRONMENT:azure'), adl: this.configuration.buildInfo.dataLake ? 
this.configuration.buildInfo.dataLake.version : false, diff --git a/test/tools/unified-spec-runner/entities.ts b/test/tools/unified-spec-runner/entities.ts index 4b7e4f55b14..3289a2932d2 100644 --- a/test/tools/unified-spec-runner/entities.ts +++ b/test/tools/unified-spec-runner/entities.ts @@ -350,6 +350,10 @@ export class UnifiedMongoClient extends MongoClient { } export class FailPointMap extends Map { + constructor() { + super(); + } + async enableFailPoint( addressOrClient: string | HostAddress | UnifiedMongoClient, failPoint: Document @@ -567,10 +571,13 @@ export class EntitiesMap extends Map { const useMultipleMongoses = (config.topologyType === 'LoadBalanced' || config.topologyType === 'Sharded') && entity.client.useMultipleMongoses; - const uri = makeConnectionString( - config.url({ useMultipleMongoses }), - entity.client.uriOptions - ); + let uri: string; + // For OIDC we need to ensure we use MONGODB_URI_SINGLE for the MongoClient. + if (process.env.MONGODB_URI_SINGLE?.includes('MONGODB-OIDC')) { + uri = makeConnectionString(process.env.MONGODB_URI_SINGLE, entity.client.uriOptions); + } else { + uri = makeConnectionString(config.url({ useMultipleMongoses }), entity.client.uriOptions); + } const client = new UnifiedMongoClient(uri, entity.client); new EntityEventRegistry(client, entity.client, map).register(); try { diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts index b49b6aa5826..721d8497ce6 100644 --- a/test/tools/unified-spec-runner/runner.ts +++ b/test/tools/unified-spec-runner/runner.ts @@ -73,6 +73,14 @@ async function runUnifiedTest( if (ctx.configuration.isLoadBalanced) { // The util client can always point at the single mongos LB frontend. utilClient = ctx.configuration.newClient(ctx.configuration.singleMongosLoadBalancerUri); + } else if (process.env.UTIL_CLIENT_USER && process.env.UTIL_CLIENT_PASSWORD) { + // For OIDC tests the MONGODB_URI is the base admin URI that the util client will use. 
+ utilClient = ctx.configuration.newClient(process.env.MONGODB_URI, { + auth: { + username: process.env.UTIL_CLIENT_USER, + password: process.env.UTIL_CLIENT_PASSWORD + } + }); } else { utilClient = ctx.configuration.newClient(); } diff --git a/test/tools/unified-spec-runner/schema.ts b/test/tools/unified-spec-runner/schema.ts index 6fceee9a6a5..ea331ce6911 100644 --- a/test/tools/unified-spec-runner/schema.ts +++ b/test/tools/unified-spec-runner/schema.ts @@ -108,6 +108,7 @@ export type TopologyName = (typeof TopologyName)[keyof typeof TopologyName]; export interface RunOnRequirement { serverless?: 'forbid' | 'allow' | 'require'; auth?: boolean; + authMechanism?: string; maxServerVersion?: string; minServerVersion?: string; topologies?: TopologyName[]; diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts index 233274b2925..4519a509839 100644 --- a/test/tools/unified-spec-runner/unified-utils.ts +++ b/test/tools/unified-spec-runner/unified-utils.ts @@ -100,6 +100,13 @@ export async function topologySatisfies( if (!ok && skipReason == null) { skipReason = `requires auth but auth is not enabled`; } + if ( + r.authMechanism && + !config.parameters.authenticationMechanisms.includes(r.authMechanism) + ) { + ok &&= false; + skipReason = `requires ${r.authMechanism} to be supported by the server`; + } } else if (r.auth === false) { ok &&= process.env.AUTH === 'noauth' || process.env.AUTH == null; if (!ok && skipReason == null) skipReason = `requires no auth but auth is enabled`; @@ -203,7 +210,12 @@ export function makeConnectionString( ): string { const connectionString = new ConnectionString(uri); for (const [name, value] of Object.entries(uriOptions ?? 
{})) { - connectionString.searchParams.set(name, String(value)); + if (name === 'authMechanismProperties' && '$$placeholder' in (value as any)) { + // This is a no-op - we want to ignore setting this as the URI in the + // environment already has the auth mech property set. + } else { + connectionString.searchParams.set(name, String(value)); + } } return connectionString.toString(); } diff --git a/test/tools/uri_spec_runner.ts b/test/tools/uri_spec_runner.ts index 844e5bd4705..8502fff3c44 100644 --- a/test/tools/uri_spec_runner.ts +++ b/test/tools/uri_spec_runner.ts @@ -1,6 +1,12 @@ import { expect } from 'chai'; -import { MongoAPIError, MongoClient, MongoParseError, MongoRuntimeError } from '../mongodb'; +import { + MongoAPIError, + MongoClient, + MongoInvalidArgumentError, + MongoParseError, + MongoRuntimeError +} from '../mongodb'; type HostObject = { type: 'ipv4' | 'ip_literal' | 'hostname' | 'unix'; @@ -69,7 +75,9 @@ export function executeUriValidationTest( new MongoClient(test.uri); expect.fail(`Expected "${test.uri}" to be invalid${test.valid ? ' because of warning' : ''}`); } catch (err) { - if (err instanceof MongoRuntimeError) { + if (err instanceof MongoInvalidArgumentError) { + // Azure URI errors don't have an underlying cause. 
+ } else if (err instanceof MongoRuntimeError) { expect(err).to.have.nested.property('cause.code').equal('ERR_INVALID_URL'); } else if ( // most of our validation is MongoParseError, which does not extend from MongoAPIError @@ -91,15 +99,11 @@ export function executeUriValidationTest( const CALLBACKS = { oidcRequest: async () => { return { accessToken: '' }; - }, - oidcRefresh: async () => { - return { accessToken: '' }; } }; const CALLBACK_MAPPINGS = { - oidcRequest: 'REQUEST_TOKEN_CALLBACK', - oidcRefresh: 'REFRESH_TOKEN_CALLBACK' + oidcRequest: 'OIDC_TOKEN_CALLBACK' }; const mongoClientOptions = {}; @@ -223,10 +227,7 @@ export function executeUriValidationTest( // TODO(NODE-3925): Ensure default SERVICE_NAME is set on the parsed mechanism properties continue; } - if ( - expectedMechProp === 'REQUEST_TOKEN_CALLBACK' || - expectedMechProp === 'REFRESH_TOKEN_CALLBACK' - ) { + if (expectedMechProp === 'OIDC_TOKEN_CALLBACK') { expect( options, `${errorMessage} credentials.mechanismProperties.${expectedMechProp}` diff --git a/test/unit/client-side-encryption/providers/credentialsProvider.test.ts b/test/unit/client-side-encryption/providers/credentialsProvider.test.ts index 486fb41c60e..a21ac96ef33 100644 --- a/test/unit/client-side-encryption/providers/credentialsProvider.test.ts +++ b/test/unit/client-side-encryption/providers/credentialsProvider.test.ts @@ -3,10 +3,7 @@ import * as http from 'http'; import * as sinon from 'sinon'; // eslint-disable-next-line @typescript-eslint/no-restricted-imports -import { - MongoCryptAzureKMSRequestError, - MongoCryptKMSRequestNetworkTimeoutError -} from '../../../../src/client-side-encryption/errors'; +import { MongoCryptAzureKMSRequestError } from '../../../../src/client-side-encryption/errors'; // eslint-disable-next-line @typescript-eslint/no-restricted-imports import { isEmptyCredentials, @@ -19,9 +16,10 @@ import { tokenCache } from '../../../../src/client-side-encryption/providers/azure'; // eslint-disable-next-line 
@typescript-eslint/no-restricted-imports -import * as utils from '../../../../src/client-side-encryption/providers/utils'; -// eslint-disable-next-line @typescript-eslint/no-restricted-imports import { AWSSDKCredentialProvider } from '../../../../src/cmap/auth/aws_temporary_credentials'; +// eslint-disable-next-line @typescript-eslint/no-restricted-imports +import * as utils from '../../../../src/utils'; +import { MongoNetworkTimeoutError } from '../../../mongodb'; import * as requirements from '../requirements.helper'; const originalAccessKeyId = process.env.AWS_ACCESS_KEY_ID; @@ -413,18 +411,6 @@ describe('#refreshKMSCredentials', function () { }); }); - it('allows a custom URL to be specified', () => { - const url = httpSpy.args[0][0]; - expect(url).to.be.instanceof(URL); - expect(url.toString()).to.include('http://customentpoint.com'); - }); - - it('deep copies the provided url', () => { - const spiedUrl = httpSpy.args[0][0]; - expect(spiedUrl).to.be.instanceof(URL); - expect(spiedUrl).to.not.equal(url); - }); - it('allows custom headers to be specified', () => { const options = httpSpy.args[0][1]; expect(options).to.have.property('headers').to.have.property('customHeader1', 'value1'); @@ -437,9 +423,7 @@ describe('#refreshKMSCredentials', function () { afterEach(() => sinon.restore()); context('when the request times out', () => { before(() => { - sinon - .stub(utils, 'get') - .rejects(new MongoCryptKMSRequestNetworkTimeoutError('request timed out')); + sinon.stub(utils, 'get').rejects(new MongoNetworkTimeoutError('request timed out')); }); it('throws a MongoCryptKMSRequestError', async () => { diff --git a/test/unit/cmap/auth/mongodb_oidc.test.ts b/test/unit/cmap/auth/mongodb_oidc.test.ts deleted file mode 100644 index 121244688e9..00000000000 --- a/test/unit/cmap/auth/mongodb_oidc.test.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { expect } from 'chai'; - -import { - AuthContext, - MongoCredentials, - MongoDBOIDC, - MongoInvalidArgumentError -} from 
'../../../mongodb'; - -describe('class MongoDBOIDC', () => { - context('when an unknown OIDC provider name is set', () => { - it('prepare rejects with MongoInvalidArgumentError', async () => { - const oidc = new MongoDBOIDC(); - const error = await oidc - .prepare( - {}, - new AuthContext( - {}, - new MongoCredentials({ - mechanism: 'MONGODB-OIDC', - mechanismProperties: { PROVIDER_NAME: 'iLoveJavaScript' } - }), - {} - ) - ) - .catch(error => error); - - expect(error).to.be.instanceOf(MongoInvalidArgumentError); - expect(error).to.match(/workflow for provider/); - }); - - it('auth rejects with MongoInvalidArgumentError', async () => { - const oidc = new MongoDBOIDC(); - const error = await oidc - .auth( - new AuthContext( - {}, - new MongoCredentials({ - mechanism: 'MONGODB-OIDC', - mechanismProperties: { PROVIDER_NAME: 'iLoveJavaScript' } - }), - {} - ) - ) - .catch(error => error); - - expect(error).to.be.instanceOf(MongoInvalidArgumentError); - expect(error).to.match(/workflow for provider/); - }); - }); -}); diff --git a/test/unit/cmap/auth/mongodb_oidc/aws_service_workflow.test.ts b/test/unit/cmap/auth/mongodb_oidc/aws_service_workflow.test.ts deleted file mode 100644 index 55438240e7f..00000000000 --- a/test/unit/cmap/auth/mongodb_oidc/aws_service_workflow.test.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { expect } from 'chai'; -import * as sinon from 'sinon'; - -import { AwsServiceWorkflow, Connection, MongoCredentials } from '../../../../mongodb'; - -describe('AwsDeviceWorkFlow', function () { - describe('#execute', function () { - const workflow = new AwsServiceWorkflow(); - - context('when AWS_WEB_IDENTITY_TOKEN_FILE is not in the env', function () { - let file; - const connection = sinon.createStubInstance(Connection); - const credentials = sinon.createStubInstance(MongoCredentials); - - before(function () { - file = process.env.AWS_WEB_IDENTITY_TOKEN_FILE; - delete process.env.AWS_WEB_IDENTITY_TOKEN_FILE; - }); - - after(function () { - 
process.env.AWS_WEB_IDENTITY_TOKEN_FILE = file; - }); - - it('throws an error', async function () { - try { - await workflow.execute(connection, credentials); - expect.fail('workflow must fail without AWS_WEB_IDENTITY_TOKEN_FILE'); - } catch (error) { - expect(error.message).to.include('AWS_WEB_IDENTITY_TOKEN_FILE'); - } - }); - }); - }); -}); diff --git a/test/unit/cmap/auth/mongodb_oidc/azure_machine_workflow.test.ts b/test/unit/cmap/auth/mongodb_oidc/azure_machine_workflow.test.ts new file mode 100644 index 00000000000..b60c4f045da --- /dev/null +++ b/test/unit/cmap/auth/mongodb_oidc/azure_machine_workflow.test.ts @@ -0,0 +1,22 @@ +import { expect } from 'chai'; +import * as sinon from 'sinon'; + +// eslint-disable-next-line @typescript-eslint/no-restricted-imports +import { TokenCache } from '../../../../../src/cmap/auth/mongodb_oidc/token_cache'; +import { AzureMachineWorkflow, Connection, MongoCredentials } from '../../../../mongodb'; + +describe('AzureMachineFlow', function () { + describe('#execute', function () { + const workflow = new AzureMachineWorkflow(new TokenCache()); + + context('when TOKEN_RESOURCE is not set', function () { + const connection = sinon.createStubInstance(Connection); + const credentials = sinon.createStubInstance(MongoCredentials); + + it('throws an error', async function () { + const error = await workflow.execute(connection, credentials).catch(error => error); + expect(error.message).to.include('TOKEN_RESOURCE'); + }); + }); + }); +}); diff --git a/test/unit/cmap/auth/mongodb_oidc/azure_token_cache.test.ts b/test/unit/cmap/auth/mongodb_oidc/azure_token_cache.test.ts deleted file mode 100644 index ac95eb8a9c3..00000000000 --- a/test/unit/cmap/auth/mongodb_oidc/azure_token_cache.test.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { expect } from 'chai'; - -import { AzureTokenCache } from '../../../../mongodb'; - -describe('AzureTokenCache', function () { - const tokenResultWithExpiration = Object.freeze({ - access_token: 'test', - 
expires_in: 100 - }); - - describe('#addEntry', function () { - context('when expiresInSeconds is provided', function () { - const cache = new AzureTokenCache(); - let entry; - - before(function () { - cache.addEntry('audience', tokenResultWithExpiration); - entry = cache.getEntry('audience'); - }); - - it('adds the token result', function () { - expect(entry.token).to.equal('test'); - }); - - it('creates an expiration', function () { - expect(entry.expiration).to.be.within(Date.now(), Date.now() + 100 * 1000); - }); - }); - }); - - describe('#clear', function () { - const cache = new AzureTokenCache(); - - before(function () { - cache.addEntry('audience', tokenResultWithExpiration); - cache.clear(); - }); - - it('clears the cache', function () { - expect(cache.entries.size).to.equal(0); - }); - }); - - describe('#deleteEntry', function () { - const cache = new AzureTokenCache(); - - before(function () { - cache.addEntry('audience', tokenResultWithExpiration); - cache.deleteEntry('audience'); - }); - - it('deletes the entry', function () { - expect(cache.getEntry('audience')).to.not.exist; - }); - }); - - describe('#getEntry', function () { - const cache = new AzureTokenCache(); - - before(function () { - cache.addEntry('audience1', tokenResultWithExpiration); - cache.addEntry('audience2', tokenResultWithExpiration); - }); - - context('when there is a matching entry', function () { - it('returns the entry', function () { - expect(cache.getEntry('audience1')?.token).to.equal('test'); - }); - }); - - context('when there is no matching entry', function () { - it('returns undefined', function () { - expect(cache.getEntry('audience')).to.equal(undefined); - }); - }); - }); -}); diff --git a/test/unit/cmap/auth/mongodb_oidc/callback_lock_cache.test.ts b/test/unit/cmap/auth/mongodb_oidc/callback_lock_cache.test.ts deleted file mode 100644 index d10490fa5b0..00000000000 --- a/test/unit/cmap/auth/mongodb_oidc/callback_lock_cache.test.ts +++ /dev/null @@ -1,145 +0,0 @@ 
-import { expect } from 'chai'; -import * as sinon from 'sinon'; - -import { - CallbackLockCache, - Connection, - MongoCredentials, - MongoInvalidArgumentError -} from '../../../../mongodb'; -import { sleep } from '../../../../tools/utils'; - -describe('CallbackLockCache', function () { - describe('#getCallbacks', function () { - const connection = sinon.createStubInstance(Connection); - connection.address = 'localhost:27017'; - - context('when a request callback does not exist', function () { - const credentials = new MongoCredentials({ - username: 'test_user', - password: 'pwd', - source: '$external', - mechanismProperties: {} - }); - const cache = new CallbackLockCache(); - - it('raises an error', function () { - try { - cache.getEntry(connection, credentials); - expect.fail('Must raise error when no request callback exists.'); - } catch (error) { - expect(error).to.be.instanceOf(MongoInvalidArgumentError); - expect(error.message).to.include( - 'Auth mechanism property REQUEST_TOKEN_CALLBACK is required' - ); - } - }); - }); - - context('when no entry exists in the cache', function () { - context('when a refresh callback exists', function () { - let requestCount = 0; - let refreshCount = 0; - - const request = async () => { - requestCount++; - if (requestCount > 1) { - throw new Error('Cannot execute request simultaneously.'); - } - await sleep(1000); - requestCount--; - return { accessToken: '' }; - }; - const refresh = async () => { - refreshCount++; - if (refreshCount > 1) { - throw new Error('Cannot execute refresh simultaneously.'); - } - await sleep(1000); - refreshCount--; - return Promise.resolve({ accessToken: '' }); - }; - const requestSpy = sinon.spy(request); - const refreshSpy = sinon.spy(refresh); - const credentials = new MongoCredentials({ - username: 'test_user', - password: 'pwd', - source: '$external', - mechanismProperties: { - REQUEST_TOKEN_CALLBACK: requestSpy, - REFRESH_TOKEN_CALLBACK: refreshSpy - } - }); - const cache = new 
CallbackLockCache(); - const { requestCallback, refreshCallback, callbackHash } = cache.getEntry( - connection, - credentials - ); - - it('puts a new entry in the cache', function () { - expect(cache.entries).to.have.lengthOf(1); - }); - - it('returns the new entry', function () { - expect(requestCallback).to.exist; - expect(refreshCallback).to.exist; - expect(callbackHash).to.exist; - }); - - it('locks the callbacks', async function () { - await Promise.allSettled([ - requestCallback(), - requestCallback(), - refreshCallback(), - refreshCallback() - ]); - expect(requestSpy).to.have.been.calledTwice; - expect(refreshSpy).to.have.been.calledTwice; - }); - }); - - context('when a refresh function does not exist', function () { - let requestCount = 0; - - const request = async () => { - requestCount++; - if (requestCount > 1) { - throw new Error('Cannot execute request simultaneously.'); - } - await sleep(1000); - requestCount--; - return Promise.resolve({ accessToken: '' }); - }; - const requestSpy = sinon.spy(request); - const credentials = new MongoCredentials({ - username: 'test_user', - password: 'pwd', - source: '$external', - mechanismProperties: { - REQUEST_TOKEN_CALLBACK: requestSpy - } - }); - const cache = new CallbackLockCache(); - const { requestCallback, refreshCallback, callbackHash } = cache.getEntry( - connection, - credentials - ); - - it('puts a new entry in the cache', function () { - expect(cache.entries).to.have.lengthOf(1); - }); - - it('returns the new entry', function () { - expect(requestCallback).to.exist; - expect(refreshCallback).to.not.exist; - expect(callbackHash).to.exist; - }); - - it('locks the callbacks', async function () { - await Promise.allSettled([requestCallback(), requestCallback()]); - expect(requestSpy).to.have.been.calledTwice; - }); - }); - }); - }); -}); diff --git a/test/unit/cmap/auth/mongodb_oidc/gcp_machine_workflow.test.ts b/test/unit/cmap/auth/mongodb_oidc/gcp_machine_workflow.test.ts new file mode 100644 index 
00000000000..4cdd2bb4b2e --- /dev/null +++ b/test/unit/cmap/auth/mongodb_oidc/gcp_machine_workflow.test.ts @@ -0,0 +1,22 @@ +import { expect } from 'chai'; +import * as sinon from 'sinon'; + +// eslint-disable-next-line @typescript-eslint/no-restricted-imports +import { TokenCache } from '../../../../../src/cmap/auth/mongodb_oidc/token_cache'; +import { Connection, GCPMachineWorkflow, MongoCredentials } from '../../../../mongodb'; + +describe('GCPMachineFlow', function () { + describe('#execute', function () { + const workflow = new GCPMachineWorkflow(new TokenCache()); + + context('when TOKEN_RESOURCE is not set', function () { + const connection = sinon.createStubInstance(Connection); + const credentials = sinon.createStubInstance(MongoCredentials); + + it('throws an error', async function () { + const error = await workflow.execute(connection, credentials).catch(error => error); + expect(error.message).to.include('TOKEN_RESOURCE'); + }); + }); + }); +}); diff --git a/test/unit/cmap/auth/mongodb_oidc/token_entry_cache.test.ts b/test/unit/cmap/auth/mongodb_oidc/token_entry_cache.test.ts deleted file mode 100644 index 90f3a940858..00000000000 --- a/test/unit/cmap/auth/mongodb_oidc/token_entry_cache.test.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { expect } from 'chai'; - -import { type TokenEntry, TokenEntryCache } from '../../../../mongodb'; - -describe('TokenEntryCache', function () { - const tokenResultWithExpiration = Object.freeze({ - accessToken: 'test', - expiresInSeconds: 100 - }); - const serverResult = Object.freeze({ - issuer: 'test', - clientId: '1' - }); - const callbackHash = '1'; - - describe('#addEntry', function () { - context('when expiresInSeconds is provided', function () { - const cache = new TokenEntryCache(); - let entry; - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, tokenResultWithExpiration, serverResult); - entry = cache.getEntry('localhost', 'user', callbackHash); - }); - - it('adds the token result', 
function () { - expect(entry.tokenResult).to.deep.equal(tokenResultWithExpiration); - }); - - it('adds the server result', function () { - expect(entry.serverInfo).to.deep.equal(serverResult); - }); - - it('creates an expiration', function () { - expect(entry.expiration).to.be.within(Date.now(), Date.now() + 100 * 1000); - }); - }); - - context('when expiresInSeconds is not provided', function () { - const cache = new TokenEntryCache(); - let entry: TokenEntry | undefined; - - const expiredResult = Object.freeze({ accessToken: 'test' }); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, expiredResult, serverResult); - entry = cache.getEntry('localhost', 'user', callbackHash); - }); - - it('sets an immediate expiration', function () { - expect(entry?.expiration).to.be.at.most(Date.now()); - }); - }); - - context('when expiresInSeconds is null', function () { - const cache = new TokenEntryCache(); - let entry: TokenEntry | undefined; - - const expiredResult = Object.freeze({ - accessToken: 'test', - expiredInSeconds: null - }); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, expiredResult, serverResult); - entry = cache.getEntry('localhost', 'user', callbackHash); - }); - - it('sets an immediate expiration', function () { - expect(entry?.expiration).to.be.at.most(Date.now()); - }); - }); - }); - - describe('#clear', function () { - const cache = new TokenEntryCache(); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, tokenResultWithExpiration, serverResult); - cache.clear(); - }); - - it('clears the cache', function () { - expect(cache.entries.size).to.equal(0); - }); - }); - - describe('#deleteExpiredEntries', function () { - const cache = new TokenEntryCache(); - - const nonExpiredResult = Object.freeze({ - accessToken: 'test', - expiresInSeconds: 600 - }); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, tokenResultWithExpiration, serverResult); - 
cache.addEntry('localhost', 'user2', callbackHash, nonExpiredResult, serverResult); - cache.deleteExpiredEntries(); - }); - - it('deletes all expired tokens from the cache 5 minutes before expiredInSeconds', function () { - expect(cache.entries.size).to.equal(1); - expect(cache.getEntry('localhost', 'user', callbackHash)).to.not.exist; - expect(cache.getEntry('localhost', 'user2', callbackHash)).to.exist; - }); - }); - - describe('#deleteEntry', function () { - const cache = new TokenEntryCache(); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, tokenResultWithExpiration, serverResult); - cache.deleteEntry('localhost', 'user', callbackHash); - }); - - it('deletes the entry', function () { - expect(cache.getEntry('localhost', 'user', callbackHash)).to.not.exist; - }); - }); - - describe('#getEntry', function () { - const cache = new TokenEntryCache(); - - before(function () { - cache.addEntry('localhost', 'user', callbackHash, tokenResultWithExpiration, serverResult); - cache.addEntry('localhost', 'user2', callbackHash, tokenResultWithExpiration, serverResult); - }); - - context('when there is a matching entry', function () { - it('returns the entry', function () { - expect(cache.getEntry('localhost', 'user', callbackHash)?.tokenResult).to.equal( - tokenResultWithExpiration - ); - }); - }); - - context('when there is no matching entry', function () { - it('returns undefined', function () { - expect(cache.getEntry('localhost', 'user1', callbackHash)).to.equal(undefined); - }); - }); - }); -}); diff --git a/test/unit/cmap/auth/mongodb_oidc/token_machine_workflow.test.ts b/test/unit/cmap/auth/mongodb_oidc/token_machine_workflow.test.ts new file mode 100644 index 00000000000..b0302d7f03e --- /dev/null +++ b/test/unit/cmap/auth/mongodb_oidc/token_machine_workflow.test.ts @@ -0,0 +1,34 @@ +import { expect } from 'chai'; +import * as sinon from 'sinon'; + +// eslint-disable-next-line @typescript-eslint/no-restricted-imports +import { TokenCache 
} from '../../../../../src/cmap/auth/mongodb_oidc/token_cache'; +import { Connection, MongoCredentials, TokenMachineWorkflow } from '../../../../mongodb'; + +describe('TokenMachineFlow', function () { + describe('#execute', function () { + const workflow = new TokenMachineWorkflow(new TokenCache()); + + context('when OIDC_TOKEN_FILE is not in the env', function () { + let file; + const connection = sinon.createStubInstance(Connection); + const credentials = sinon.createStubInstance(MongoCredentials); + + before(function () { + file = process.env.OIDC_TOKEN_FILE; + delete process.env.OIDC_TOKEN_FILE; + }); + + after(function () { + if (file) { + process.env.OIDC_TOKEN_FILE = file; + } + }); + + it('throws an error', async function () { + const error = await workflow.execute(connection, credentials).catch(error => error); + expect(error.message).to.include('OIDC_TOKEN_FILE'); + }); + }); + }); +}); diff --git a/test/unit/connection_string.test.ts b/test/unit/connection_string.test.ts index 2a38fc491ad..244273ef789 100644 --- a/test/unit/connection_string.test.ts +++ b/test/unit/connection_string.test.ts @@ -303,7 +303,7 @@ describe('Connection String', function () { it('raises an error', function () { expect(() => { parseOptions( - 'mongodb://localhost/?authMechanismProperties=PROVIDER_NAME:aws,ALLOWED_HOSTS:[localhost]&authMechanism=MONGODB-OIDC' + 'mongodb://localhost/?authMechanismProperties=ENVIRONMENT:test,ALLOWED_HOSTS:[localhost]&authMechanism=MONGODB-OIDC' ); }).to.throw( MongoParseError, @@ -318,7 +318,7 @@ describe('Connection String', function () { it('sets the allowed hosts property', function () { const options = parseOptions( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws', + 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test', { authMechanismProperties: { ALLOWED_HOSTS: hosts @@ -326,7 +326,7 @@ describe('Connection String', function () { } ); 
expect(options.credentials.mechanismProperties).to.deep.equal({ - PROVIDER_NAME: 'aws', + ENVIRONMENT: 'test', ALLOWED_HOSTS: hosts }); }); @@ -336,7 +336,7 @@ describe('Connection String', function () { it('raises an error', function () { expect(() => { parseOptions( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws', + 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test', { authMechanismProperties: { ALLOWED_HOSTS: [1, 2, 3] @@ -354,25 +354,25 @@ describe('Connection String', function () { context('when ALLOWED_HOSTS is not in the options', function () { it('sets the default value', function () { const options = parseOptions( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:aws' + 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test' ); expect(options.credentials.mechanismProperties).to.deep.equal({ - PROVIDER_NAME: 'aws', + ENVIRONMENT: 'test', ALLOWED_HOSTS: DEFAULT_ALLOWED_HOSTS }); }); }); - context('when TOKEN_AUDIENCE is in the properties', function () { + context('when TOKEN_RESOURCE is in the properties', function () { context('when it is a uri', function () { const options = parseOptions( - 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=PROVIDER_NAME:azure,TOKEN_AUDIENCE:api%3A%2F%2Ftest' + 'mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:api%3A%2F%2Ftest' ); it('parses the uri', function () { expect(options.credentials.mechanismProperties).to.deep.equal({ - PROVIDER_NAME: 'azure', - TOKEN_AUDIENCE: 'api://test', + ENVIRONMENT: 'azure', + TOKEN_RESOURCE: 'api://test', ALLOWED_HOSTS: DEFAULT_ALLOWED_HOSTS }); }); @@ -655,7 +655,7 @@ describe('Connection String', function () { makeStub('authSource=thisShouldNotBeAuthSource'); const mechanismProperties = {}; if (mechanism === AuthMechanism.MONGODB_OIDC) { - 
mechanismProperties.PROVIDER_NAME = 'aws'; + mechanismProperties.ENVIRONMENT = 'test'; } const credentials = new MongoCredentials({ diff --git a/test/unit/index.test.ts b/test/unit/index.test.ts index 508f3d85c2a..6509568c018 100644 --- a/test/unit/index.test.ts +++ b/test/unit/index.test.ts @@ -86,6 +86,7 @@ const EXPECTED_EXPORTS = [ 'MongoError', 'MongoErrorLabel', 'MongoExpiredSessionError', + 'MongoGCPError', 'MongoGridFSChunkError', 'MongoGridFSStreamError', 'MongoInvalidArgumentError', @@ -95,6 +96,7 @@ const EXPECTED_EXPORTS = [ 'MongoNetworkError', 'MongoNetworkTimeoutError', 'MongoNotConnectedError', + 'MongoOIDCError', 'MongoParseError', 'MongoRuntimeError', 'MongoServerClosedError', diff --git a/test/unit/sdam/server_description.test.ts b/test/unit/sdam/server_description.test.ts index e91a0863bdc..14c3ae0dbda 100644 --- a/test/unit/sdam/server_description.test.ts +++ b/test/unit/sdam/server_description.test.ts @@ -130,6 +130,22 @@ describe('ServerDescription', function () { currentTv: { processId: processIdZero, counter: Long.fromNumber(2) }, newTv: { processId: processIdZero, counter: Long.fromNumber(2) }, out: 0 + }, + { + title: 'when process ids are equal and both counter values are zero bigints', + // @ts-expect-error: Testing that the function handles bigints + currentTv: { processId: processIdZero, counter: 0n }, + // @ts-expect-error: Testing that the function handles bigints + newTv: { processId: processIdZero, counter: 0n }, + out: 0 + }, + { + title: 'when process ids are equal and both counter values are non-zero bigints', + // @ts-expect-error: Testing that the function handles bigints + currentTv: { processId: processIdZero, counter: 2n }, + // @ts-expect-error: Testing that the function handles bigints + newTv: { processId: processIdZero, counter: 2n }, + out: 0 } ]; const compareTopologyVersionLessThanTests: CompareTopologyVersionTest[] = [ @@ -178,6 +194,12 @@ describe('ServerDescription', function () { currentTv: { processId: 
processIdZero, counter: Long.fromNumber(3) }, newTv: { processId: processIdZero, counter: Long.fromNumber(2) }, out: 1 + }, + { + title: 'when processIds are equal but new counter is less than current (bigint)', + currentTv: { processId: processIdZero, counter: 3n }, + newTv: { processId: processIdZero, counter: 2n }, + out: 1 } ];